Dec 05 06:46:11 crc systemd[1]: Starting Kubernetes Kubelet... Dec 05 06:46:11 crc restorecon[4749]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc 
restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 06:46:11 crc 
restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc 
restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc 
restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 
crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 06:46:11 crc 
restorecon[4749]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:11 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc 
restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 06:46:12 crc restorecon[4749]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 05 06:46:12 crc kubenswrapper[4863]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 06:46:12 crc kubenswrapper[4863]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 05 06:46:12 crc kubenswrapper[4863]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 06:46:12 crc kubenswrapper[4863]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 05 06:46:12 crc kubenswrapper[4863]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 05 06:46:12 crc kubenswrapper[4863]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.403498 4863 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406223 4863 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406241 4863 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406245 4863 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406250 4863 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406254 4863 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406257 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406268 4863 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406272 4863 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406277 4863 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406282 4863 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406286 4863 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406291 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406295 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406298 4863 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406302 4863 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406305 4863 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406309 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406313 4863 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406316 4863 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406320 4863 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406323 4863 
feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406327 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406330 4863 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406335 4863 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406340 4863 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406344 4863 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406347 4863 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406352 4863 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406356 4863 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406360 4863 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406365 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406369 4863 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406373 4863 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406377 4863 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406381 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406384 4863 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406388 4863 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406393 4863 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406397 4863 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406402 4863 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406406 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406413 4863 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406417 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406421 4863 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406425 4863 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406428 4863 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406431 4863 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406435 4863 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406439 4863 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406444 4863 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406447 4863 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406451 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406454 4863 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406458 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406461 4863 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406465 4863 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406481 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406485 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406488 4863 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406492 4863 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406495 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406499 4863 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406502 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406506 4863 feature_gate.go:330] unrecognized feature gate: 
VSphereStaticIPs Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406510 4863 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406513 4863 feature_gate.go:330] unrecognized feature gate: Example Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406519 4863 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406522 4863 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406526 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406529 4863 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.406533 4863 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406602 4863 flags.go:64] FLAG: --address="0.0.0.0" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406610 4863 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406616 4863 flags.go:64] FLAG: --anonymous-auth="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406621 4863 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406626 4863 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406632 4863 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406638 4863 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406643 4863 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406647 4863 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406652 4863 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406656 4863 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406660 4863 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406665 4863 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406669 4863 flags.go:64] FLAG: --cgroup-root="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406673 4863 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406677 4863 flags.go:64] FLAG: --client-ca-file="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406681 4863 flags.go:64] FLAG: --cloud-config="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406685 4863 flags.go:64] FLAG: --cloud-provider="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406688 4863 flags.go:64] FLAG: --cluster-dns="[]" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406694 4863 flags.go:64] FLAG: --cluster-domain="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406698 4863 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 
06:46:12.406702 4863 flags.go:64] FLAG: --config-dir="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406706 4863 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406710 4863 flags.go:64] FLAG: --container-log-max-files="5" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406716 4863 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406720 4863 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406724 4863 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406728 4863 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406732 4863 flags.go:64] FLAG: --contention-profiling="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406737 4863 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406741 4863 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406745 4863 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406749 4863 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406753 4863 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406757 4863 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406762 4863 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406766 4863 flags.go:64] FLAG: --enable-load-reader="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406770 4863 flags.go:64] FLAG: --enable-server="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406774 4863 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406780 4863 flags.go:64] FLAG: --event-burst="100" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406784 4863 flags.go:64] FLAG: --event-qps="50" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406789 4863 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406793 4863 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406798 4863 flags.go:64] FLAG: --eviction-hard="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406803 4863 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406808 4863 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406812 4863 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406817 4863 flags.go:64] FLAG: --eviction-soft="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406821 4863 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406825 4863 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406830 4863 flags.go:64] FLAG: 
--experimental-allocatable-ignore-eviction="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406834 4863 flags.go:64] FLAG: --experimental-mounter-path="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406838 4863 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406842 4863 flags.go:64] FLAG: --fail-swap-on="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406845 4863 flags.go:64] FLAG: --feature-gates="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406850 4863 flags.go:64] FLAG: --file-check-frequency="20s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406854 4863 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406859 4863 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406863 4863 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406868 4863 flags.go:64] FLAG: --healthz-port="10248" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406872 4863 flags.go:64] FLAG: --help="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406876 4863 flags.go:64] FLAG: --hostname-override="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406880 4863 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406884 4863 flags.go:64] FLAG: --http-check-frequency="20s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406888 4863 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406892 4863 flags.go:64] FLAG: --image-credential-provider-config="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406896 4863 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406900 4863 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406904 4863 flags.go:64] FLAG: --image-service-endpoint="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406909 4863 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406912 4863 flags.go:64] FLAG: --kube-api-burst="100" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406917 4863 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406921 4863 flags.go:64] FLAG: --kube-api-qps="50" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406925 4863 flags.go:64] FLAG: --kube-reserved="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406929 4863 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406933 4863 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406937 4863 flags.go:64] FLAG: --kubelet-cgroups="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406945 4863 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406949 4863 flags.go:64] FLAG: --lock-file="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406953 4863 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406957 4863 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 05 
06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406961 4863 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406967 4863 flags.go:64] FLAG: --log-json-split-stream="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406971 4863 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406975 4863 flags.go:64] FLAG: --log-text-split-stream="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406979 4863 flags.go:64] FLAG: --logging-format="text" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406983 4863 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406987 4863 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406991 4863 flags.go:64] FLAG: --manifest-url="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.406995 4863 flags.go:64] FLAG: --manifest-url-header="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407000 4863 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407004 4863 flags.go:64] FLAG: --max-open-files="1000000" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407009 4863 flags.go:64] FLAG: --max-pods="110" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407013 4863 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407017 4863 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407021 4863 flags.go:64] FLAG: --memory-manager-policy="None" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407025 4863 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407029 4863 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407034 4863 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407038 4863 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407047 4863 flags.go:64] FLAG: --node-status-max-images="50" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407052 4863 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407056 4863 flags.go:64] FLAG: --oom-score-adj="-999" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407060 4863 flags.go:64] FLAG: --pod-cidr="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407064 4863 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407072 4863 flags.go:64] FLAG: --pod-manifest-path="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407076 4863 flags.go:64] FLAG: --pod-max-pids="-1" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407081 4863 flags.go:64] FLAG: --pods-per-core="0" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407084 4863 flags.go:64] FLAG: --port="10250" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407090 4863 flags.go:64] FLAG: 
--protect-kernel-defaults="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407094 4863 flags.go:64] FLAG: --provider-id="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407098 4863 flags.go:64] FLAG: --qos-reserved="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407103 4863 flags.go:64] FLAG: --read-only-port="10255" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407107 4863 flags.go:64] FLAG: --register-node="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407111 4863 flags.go:64] FLAG: --register-schedulable="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407115 4863 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407126 4863 flags.go:64] FLAG: --registry-burst="10" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407130 4863 flags.go:64] FLAG: --registry-qps="5" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407134 4863 flags.go:64] FLAG: --reserved-cpus="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407138 4863 flags.go:64] FLAG: --reserved-memory="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407143 4863 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407146 4863 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407151 4863 flags.go:64] FLAG: --rotate-certificates="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407155 4863 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407159 4863 flags.go:64] FLAG: --runonce="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407163 4863 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407167 4863 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407171 4863 flags.go:64] FLAG: --seccomp-default="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407175 4863 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407179 4863 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407183 4863 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407188 4863 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407192 4863 flags.go:64] FLAG: --storage-driver-password="root" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407196 4863 flags.go:64] FLAG: --storage-driver-secure="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407200 4863 flags.go:64] FLAG: --storage-driver-table="stats" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407204 4863 flags.go:64] FLAG: --storage-driver-user="root" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407208 4863 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407212 4863 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407216 4863 flags.go:64] FLAG: --system-cgroups="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407220 4863 flags.go:64] FLAG: 
--system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407226 4863 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407236 4863 flags.go:64] FLAG: --tls-cert-file="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407240 4863 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407245 4863 flags.go:64] FLAG: --tls-min-version="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407249 4863 flags.go:64] FLAG: --tls-private-key-file="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407253 4863 flags.go:64] FLAG: --topology-manager-policy="none" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407257 4863 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407261 4863 flags.go:64] FLAG: --topology-manager-scope="container" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407265 4863 flags.go:64] FLAG: --v="2" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407271 4863 flags.go:64] FLAG: --version="false" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407277 4863 flags.go:64] FLAG: --vmodule="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407281 4863 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407286 4863 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407395 4863 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407400 4863 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407404 4863 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407408 4863 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407412 4863 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407416 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407420 4863 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407423 4863 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407427 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407431 4863 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407436 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407440 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407444 4863 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407448 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407452 4863 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407455 4863 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407459 4863 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407462 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407478 4863 feature_gate.go:330] unrecognized feature gate: Example Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407482 4863 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407487 4863 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407491 4863 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407494 4863 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407498 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407501 4863 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407504 4863 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407508 4863 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407511 4863 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407515 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407518 4863 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407522 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407525 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407529 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407533 4863 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407536 4863 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407540 4863 feature_gate.go:330] 
unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407543 4863 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407547 4863 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407550 4863 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407554 4863 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407557 4863 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407560 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407564 4863 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407569 4863 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407574 4863 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407578 4863 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407582 4863 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407585 4863 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407590 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407593 4863 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407597 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407600 4863 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407606 4863 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407610 4863 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407614 4863 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407618 4863 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407621 4863 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407625 4863 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407629 4863 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407632 4863 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407636 4863 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407639 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407642 4863 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407646 4863 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407649 4863 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407653 4863 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407656 4863 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407661 4863 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407666 4863 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407670 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.407674 4863 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.407856 4863 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.419147 4863 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.419190 4863 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419332 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419346 4863 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419356 4863 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419368 4863 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419384 4863 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419393 4863 feature_gate.go:330] unrecognized feature gate: Example Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419402 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419410 4863 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419419 4863 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419430 4863 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419439 4863 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419447 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419455 4863 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419463 4863 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419505 4863 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419514 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419524 4863 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419533 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419543 4863 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419555 4863 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419565 4863 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419575 4863 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419584 4863 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419593 4863 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419601 4863 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419610 4863 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419618 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419628 4863 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419635 4863 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419643 4863 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419651 4863 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419659 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419667 4863 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419674 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419682 4863 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419690 4863 feature_gate.go:330] unrecognized 
feature gate: MachineConfigNodes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419698 4863 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419706 4863 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419715 4863 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419724 4863 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419732 4863 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419739 4863 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419747 4863 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419758 4863 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419768 4863 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419778 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419789 4863 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419800 4863 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419808 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419817 4863 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419825 4863 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419835 4863 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419843 4863 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419852 4863 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419859 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419867 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419875 4863 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419883 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419892 4863 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419900 4863 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419908 4863 feature_gate.go:330] unrecognized feature gate: 
PersistentIPsForVirtualization Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419916 4863 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419923 4863 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419931 4863 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419939 4863 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419946 4863 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419954 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419962 4863 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419969 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419977 4863 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.419984 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.419998 4863 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420229 4863 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420241 4863 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420251 4863 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420263 4863 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420274 4863 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420283 4863 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420292 4863 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420300 4863 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420309 4863 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420317 4863 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420325 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420332 4863 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420340 4863 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420348 4863 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420356 4863 feature_gate.go:330] unrecognized feature gate: Example Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420363 4863 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420370 4863 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420378 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420386 4863 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420394 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420402 4863 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420409 4863 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420417 4863 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420424 4863 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420433 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420440 4863 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420448 4863 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420456 4863 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420463 4863 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420516 4863 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 06:46:12 crc 
kubenswrapper[4863]: W1205 06:46:12.420525 4863 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420532 4863 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420541 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420548 4863 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420556 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420564 4863 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420571 4863 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420583 4863 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420591 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420599 4863 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420607 4863 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420615 4863 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420622 4863 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420631 4863 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420639 4863 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420647 4863 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420656 4863 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420665 4863 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420673 4863 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420681 4863 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420689 4863 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420696 4863 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420704 4863 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420711 4863 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420720 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420728 4863 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420736 4863 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420746 4863 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420755 4863 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420764 4863 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420771 4863 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420780 4863 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420787 4863 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420795 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420802 4863 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420810 4863 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420820 4863 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420830 4863 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420839 4863 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420848 4863 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.420858 4863 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.420870 4863 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.421147 4863 server.go:940] "Client rotation is on, will bootstrap in background" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.425940 4863 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.426112 4863 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.426990 4863 server.go:997] "Starting client certificate rotation" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.427028 4863 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.427226 4863 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-17 23:13:53.746598712 +0000 UTC Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.427311 4863 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1048h27m41.319291864s for next certificate rotation Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.435034 4863 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.437952 4863 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.450765 4863 log.go:25] "Validated CRI v1 runtime API" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.473092 4863 log.go:25] "Validated CRI v1 image API" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.475339 4863 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.478321 4863 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-06-41-23-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.478376 4863 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 
blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.506412 4863 manager.go:217] Machine: {Timestamp:2025-12-05 06:46:12.504252788 +0000 UTC m=+0.230249918 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:dc36ee67-6abe-4278-a52c-0ad78d90ac7f BootID:1f49cfa4-de7c-4c8e-a3ce-17df066aabb4 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:89:c0:e0 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:89:c0:e0 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a1:32:8d Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:4e:f1:48 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:25:97:f2 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:56:ef:5f Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:db:3d:4b Speed:-1 Mtu:1496} {Name:eth10 MacAddress:1e:8c:ad:ac:50:b2 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:02:44:b5:3a:1f:3b Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: 
DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.506861 4863 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
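[editor's note] The cAdvisor Machine record above reports memory and filesystem capacities in raw bytes, which is hard to scan. A small conversion-only sketch (values copied from the entry above, nothing re-measured on the node):

#!/usr/bin/env python3
# Editorial sketch: convert a few capacities from the Machine record above
# from bytes to GiB. Values are copied verbatim from the log entry.
CAPACITIES = {
    "/dev/vda4 (/var)": 85292941312,
    "/dev/vda3 (/boot)": 366869504,
    "/var/lib/etcd (tmpfs)": 1073741824,
    "/dev/shm (tmpfs)": 16827060224,
    "MemoryCapacity": 33654120448,
}

for name, size_bytes in CAPACITIES.items():
    print(f"{name:22s} {size_bytes / 2**30:7.2f} GiB")

In round numbers that is about 31 GiB of RAM and a roughly 79 GiB /var filesystem on this node.
[end editor's note]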
Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.507186 4863 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.508176 4863 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.508520 4863 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.508573 4863 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.508922 4863 topology_manager.go:138] "Creating topology manager with none policy" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.508941 4863 container_manager_linux.go:303] "Creating device plugin manager" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.509226 4863 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.509289 4863 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.509706 4863 state_mem.go:36] "Initialized new in-memory state store" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.510757 4863 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.511776 4863 kubelet.go:418] "Attempting to sync node with API server" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.511810 4863 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
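[editor's note] The container_manager_linux.go entry above embeds the effective node config as one JSON object (nodeConfig={...}), which carries the system-reserved resources and the hard eviction thresholds the kubelet will enforce. A minimal sketch that pulls that JSON back out of a log like this one and prints those two pieces; the path and helper name are editorial assumptions, and the JSON shape is taken from the log entry itself:

#!/usr/bin/env python3
# Editorial sketch: extract the nodeConfig={...} JSON embedded in the
# "Creating Container Manager object based on Node Config" line and print the
# reserved resources and hard eviction thresholds it contains.
import json
import sys

def node_configs(lines):
    decoder = json.JSONDecoder()
    marker = "nodeConfig="
    for line in lines:
        idx = line.find(marker)
        if idx != -1:
            # raw_decode stops at the end of the JSON object and ignores any
            # trailing text on the same line.
            cfg, _ = decoder.raw_decode(line[idx + len(marker):])
            yield cfg

if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "kubelet.log"  # assumed path
    with open(path, encoding="utf-8", errors="replace") as f:
        for cfg in node_configs(f):
            print("SystemReserved:", cfg.get("SystemReserved"))
            print("PodPidsLimit:  ", cfg.get("PodPidsLimit"))
            for t in cfg.get("HardEvictionThresholds", []):
                value = t.get("Value", {})
                limit = value.get("Quantity") or value.get("Percentage")
                print(f"  evict when {t['Signal']} {t['Operator']} {limit}")

For this node that works out to 200m CPU, 350Mi memory, and 350Mi ephemeral-storage reserved for the system, with eviction triggered when memory.available drops below 100Mi or nodefs.available below 10%, among the other thresholds listed in the entry.
[end editor's note]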
Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.511861 4863 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.511883 4863 kubelet.go:324] "Adding apiserver pod source" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.511903 4863 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.514253 4863 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.514773 4863 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.515978 4863 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.516252 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.516274 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.516411 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.516437 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516761 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516802 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516817 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516830 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516853 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516866 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516881 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516903 4863 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/downward-api" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516918 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516932 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516949 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.516963 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.517576 4863 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.518335 4863 server.go:1280] "Started kubelet" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.518684 4863 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.518849 4863 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.518878 4863 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.519504 4863 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 05 06:46:12 crc systemd[1]: Started Kubernetes Kubelet. Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.521087 4863 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.106:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e3ed145f092bd default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 06:46:12.518294205 +0000 UTC m=+0.244291275,LastTimestamp:2025-12-05 06:46:12.518294205 +0000 UTC m=+0.244291275,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.522309 4863 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.522385 4863 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.523062 4863 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 21:46:49.933501428 +0000 UTC Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.523171 4863 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 399h0m37.410335143s for next certificate rotation Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.523402 4863 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.523462 4863 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 05 
06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.523403 4863 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.524179 4863 server.go:460] "Adding debug handlers to kubelet server" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.524310 4863 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.526000 4863 factory.go:55] Registering systemd factory Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.526087 4863 factory.go:221] Registration of the systemd container factory successfully Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.526836 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="200ms" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.527009 4863 factory.go:153] Registering CRI-O factory Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.527087 4863 factory.go:221] Registration of the crio container factory successfully Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.527232 4863 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.527385 4863 factory.go:103] Registering Raw factory Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.527453 4863 manager.go:1196] Started watching for new ooms in manager Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.529222 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.529408 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.533688 4863 manager.go:319] Starting recovery of all containers Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.549745 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.549889 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.549933 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.549964 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.549991 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550021 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550088 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550115 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550147 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550171 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550197 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550225 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550251 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550286 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550311 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550336 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550361 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550388 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550412 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550438 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550462 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550531 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550662 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550698 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550724 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550748 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550779 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550807 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550833 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550864 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550889 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550915 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.550943 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551017 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551045 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551072 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551099 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551124 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551150 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551175 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551213 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551248 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551272 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551304 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551330 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551358 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551394 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551419 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551446 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551503 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551533 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551572 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551609 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551636 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551664 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551692 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551723 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551751 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551777 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551805 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551831 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551859 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551885 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551911 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551935 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551960 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.551985 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552010 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552033 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552059 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552082 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552117 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552146 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552173 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552200 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552223 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552250 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552279 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552304 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552333 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552362 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552386 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552411 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552436 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552460 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552603 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552628 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552652 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552677 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552702 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552726 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552753 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552778 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552808 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552833 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552856 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552881 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552905 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552945 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552973 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.552998 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553109 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553140 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553166 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553218 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553250 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553279 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553317 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553346 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553377 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553403 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553432 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553458 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553517 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553546 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553572 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553596 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553646 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553673 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553696 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553723 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553747 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553773 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553797 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553833 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553874 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553900 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553925 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553952 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.553978 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554002 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554061 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554086 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554113 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554141 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554246 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554274 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554302 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554383 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554412 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554438 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554501 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554538 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554566 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554633 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554658 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554685 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554710 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554736 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554851 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554883 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554926 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.554964 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.555002 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.555030 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.555061 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.555091 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.555119 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.555145 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.555173 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556130 4863 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556186 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556221 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556250 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556276 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556302 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556326 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556349 4863 reconstruct.go:130] "Volume 
is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556397 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556425 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556451 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556512 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556545 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556578 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556602 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556629 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556656 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556683 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556762 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556791 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556819 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556847 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556880 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556906 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556943 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556969 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.556994 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557027 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557053 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557081 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557108 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557133 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557161 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557189 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557219 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557248 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557276 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557302 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557341 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557368 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557393 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557418 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557447 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557505 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557535 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557564 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557592 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557618 4863 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557641 4863 reconstruct.go:97] "Volume reconstruction finished" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.557658 4863 reconciler.go:26] "Reconciler: start to sync state" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.576263 4863 manager.go:324] Recovery completed Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.591667 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.596697 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.596745 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.596760 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.597494 4863 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 06:46:12 crc 
kubenswrapper[4863]: I1205 06:46:12.597520 4863 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.597543 4863 state_mem.go:36] "Initialized new in-memory state store" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.598134 4863 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.599215 4863 policy_none.go:49] "None policy: Start" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.599974 4863 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.600013 4863 state_mem.go:35] "Initializing new in-memory state store" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.600549 4863 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.600587 4863 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.600612 4863 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.600658 4863 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 06:46:12 crc kubenswrapper[4863]: W1205 06:46:12.602141 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.602216 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.623777 4863 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.651665 4863 manager.go:334] "Starting Device Plugin manager" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.651753 4863 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.651770 4863 server.go:79] "Starting device plugin registration server" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.652359 4863 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.652380 4863 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.652813 4863 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.652912 4863 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.652933 4863 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.662669 4863 eviction_manager.go:285] "Eviction manager: 
failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.700930 4863 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.701063 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.702550 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.702603 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.702615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.702846 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.703100 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.703220 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.703821 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.703858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.703870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.704065 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.704190 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.704233 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.704566 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.704621 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.704641 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705228 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705265 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705276 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705422 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705453 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705519 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705768 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705902 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.705948 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707309 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707352 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707561 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707573 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707689 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707703 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707708 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.707766 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.708599 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.708647 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.708660 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.708839 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.708863 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.709103 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.709143 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.709157 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.710802 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.710848 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.710860 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.727660 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="400ms" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.753038 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.754758 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.754924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.755014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.755111 4863 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.755844 4863 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.106:6443: connect: connection refused" node="crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760015 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760063 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760105 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760125 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760143 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760179 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760206 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760242 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760262 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760280 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760324 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760356 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760373 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760407 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.760423 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862014 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862102 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862143 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862180 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862216 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862249 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod 
\"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862289 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862323 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862319 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862356 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862362 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862398 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862325 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862326 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862435 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862373 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862401 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862281 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862508 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862553 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862573 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862446 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862625 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862687 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862700 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862743 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862751 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862904 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.862803 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.956387 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.958117 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.958177 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.958197 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:12 crc kubenswrapper[4863]: I1205 06:46:12.958233 4863 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 06:46:12 crc kubenswrapper[4863]: E1205 06:46:12.958832 4863 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.106:6443: connect: connection refused" node="crc" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.042181 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.047683 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.073009 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.079878 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-b4bdbcd97aa8ae01779b9ae511514db06bc5e4db9841a28ee7e6d80a93645e45 WatchSource:0}: Error finding container b4bdbcd97aa8ae01779b9ae511514db06bc5e4db9841a28ee7e6d80a93645e45: Status 404 returned error can't find the container with id b4bdbcd97aa8ae01779b9ae511514db06bc5e4db9841a28ee7e6d80a93645e45 Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.083632 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-7f8ab9dfc688bdc932d514bd0f494dd412c7cc07e0cd855eace153668dd76b58 WatchSource:0}: Error finding container 7f8ab9dfc688bdc932d514bd0f494dd412c7cc07e0cd855eace153668dd76b58: Status 404 returned error can't find the container with id 7f8ab9dfc688bdc932d514bd0f494dd412c7cc07e0cd855eace153668dd76b58 Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.096423 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-bf51f5b5492ba6be924fb2fec074d45931b83fa4aaded39cd3c3387a804b6f00 WatchSource:0}: Error finding container bf51f5b5492ba6be924fb2fec074d45931b83fa4aaded39cd3c3387a804b6f00: Status 404 returned error can't find the container with id bf51f5b5492ba6be924fb2fec074d45931b83fa4aaded39cd3c3387a804b6f00 Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.104034 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.108715 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 06:46:13 crc kubenswrapper[4863]: E1205 06:46:13.129327 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="800ms" Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.134733 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-07631fc1218dccd6185f288ba6ab83c1d8cc89b5013be4d620aeba2fd6ddd491 WatchSource:0}: Error finding container 07631fc1218dccd6185f288ba6ab83c1d8cc89b5013be4d620aeba2fd6ddd491: Status 404 returned error can't find the container with id 07631fc1218dccd6185f288ba6ab83c1d8cc89b5013be4d620aeba2fd6ddd491 Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.139260 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-55c9450044fc1d10dfd13ff2254d459ea12d6780e1f9690a32ee4757ec3a5b38 WatchSource:0}: Error finding container 55c9450044fc1d10dfd13ff2254d459ea12d6780e1f9690a32ee4757ec3a5b38: Status 404 returned error can't find the container with id 55c9450044fc1d10dfd13ff2254d459ea12d6780e1f9690a32ee4757ec3a5b38 Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.359492 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.361484 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.361520 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.361529 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.361556 4863 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 06:46:13 crc kubenswrapper[4863]: E1205 06:46:13.361987 4863 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.106:6443: connect: connection refused" node="crc" Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.407459 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:13 crc kubenswrapper[4863]: E1205 06:46:13.407547 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.520013 4863 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial 
tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.606824 4863 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231" exitCode=0 Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.607072 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.607222 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"55c9450044fc1d10dfd13ff2254d459ea12d6780e1f9690a32ee4757ec3a5b38"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.607400 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.609224 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.609323 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.609361 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.610922 4863 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978" exitCode=0 Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.610995 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.611043 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"07631fc1218dccd6185f288ba6ab83c1d8cc89b5013be4d620aeba2fd6ddd491"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.611159 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.612094 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.612118 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.612128 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.613375 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.613456 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bf51f5b5492ba6be924fb2fec074d45931b83fa4aaded39cd3c3387a804b6f00"} Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.614511 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:13 crc kubenswrapper[4863]: E1205 06:46:13.614565 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.615707 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae" exitCode=0 Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.616357 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.616383 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7f8ab9dfc688bdc932d514bd0f494dd412c7cc07e0cd855eace153668dd76b58"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.616458 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.618801 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.618858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.618876 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.621312 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.621389 4863 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af" exitCode=0 Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.621425 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 
06:46:13.621450 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b4bdbcd97aa8ae01779b9ae511514db06bc5e4db9841a28ee7e6d80a93645e45"} Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.621696 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.622352 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.622406 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.622430 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.622443 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.622464 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:13 crc kubenswrapper[4863]: I1205 06:46:13.622512 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.721289 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:13 crc kubenswrapper[4863]: E1205 06:46:13.721399 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:13 crc kubenswrapper[4863]: E1205 06:46:13.930501 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="1.6s" Dec 05 06:46:13 crc kubenswrapper[4863]: W1205 06:46:13.986888 4863 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.106:6443: connect: connection refused Dec 05 06:46:13 crc kubenswrapper[4863]: E1205 06:46:13.986948 4863 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.106:6443: connect: connection refused" logger="UnhandledError" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.162651 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.164378 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.164411 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.164424 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.164459 4863 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.628147 4863 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11" exitCode=0 Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.628255 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.628420 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.629794 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.629830 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.629859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.633293 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2aed5a46f944c7cf02ef5b14d140f1a620df805de2f3bd613aaed7882ac48b63"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.633523 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.634958 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.634994 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.635004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.636412 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.636443 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.636457 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.636626 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.637839 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.637888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.637907 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.639030 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.639056 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.639067 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.639104 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.640356 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.640406 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.640425 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.642850 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.642879 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.642891 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e"} Dec 05 06:46:14 crc 
kubenswrapper[4863]: I1205 06:46:14.642902 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886"} Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.642991 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.644535 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.644582 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:14 crc kubenswrapper[4863]: I1205 06:46:14.644601 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.649209 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68"} Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.650872 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.650986 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.652915 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.652979 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.653009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.657278 4863 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf" exitCode=0 Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.657532 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.657556 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.658777 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf"} Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.658891 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.659066 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.659166 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:15 crc 
kubenswrapper[4863]: I1205 06:46:15.659214 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.659222 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.659232 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.659258 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.659277 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.660604 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.660649 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:15 crc kubenswrapper[4863]: I1205 06:46:15.660670 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.665744 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"84fb29c89aa50dfb3017e82df15b78a424dd0effd55cdff9bf098ce6406147f1"} Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.665823 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"55151f6865e758a63876b64c3c06d1b05333826e68dfc1e9d9a4e54711edd14d"} Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.665847 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a95fa1429b4d9be6b72b49ec46b8f53eeb53345ca9756c97c25f438f41b26fbe"} Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.665912 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.667617 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.667672 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.667693 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.806282 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.806519 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.806574 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.808239 4863 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.808312 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:16 crc kubenswrapper[4863]: I1205 06:46:16.808331 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.407995 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.675285 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7f783473338bf6df0069d590ca882361192cb61febb25c0a3924d59a1837157d"} Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.675352 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a7c0caacf023bdb67ff30ecbac2a9b09b5e42040946f5d6221aee23351d9b8da"} Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.675378 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.675417 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.675433 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.676947 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.676989 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.677009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.677051 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.677072 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.677083 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.734785 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.735044 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.736868 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.736917 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.736961 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 06:46:17 crc kubenswrapper[4863]: I1205 06:46:17.743407 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.491537 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.578547 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.678763 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.678825 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.681394 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.681524 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.681554 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.684724 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.684781 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:18 crc kubenswrapper[4863]: I1205 06:46:18.684801 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.196859 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.197110 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.198673 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.198756 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.198781 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.681428 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.681581 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.681794 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.683203 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.683261 4863 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.683278 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.683808 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.683855 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:19 crc kubenswrapper[4863]: I1205 06:46:19.683875 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:21 crc kubenswrapper[4863]: I1205 06:46:21.434593 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 06:46:21 crc kubenswrapper[4863]: I1205 06:46:21.434848 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:21 crc kubenswrapper[4863]: I1205 06:46:21.436303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:21 crc kubenswrapper[4863]: I1205 06:46:21.436371 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:21 crc kubenswrapper[4863]: I1205 06:46:21.436393 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:21 crc kubenswrapper[4863]: I1205 06:46:21.492344 4863 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 06:46:21 crc kubenswrapper[4863]: I1205 06:46:21.492448 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.055786 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.056012 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.057609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.057664 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.057684 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.239064 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:22 crc 
kubenswrapper[4863]: E1205 06:46:22.663762 4863 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.689069 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.690413 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.690515 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:22 crc kubenswrapper[4863]: I1205 06:46:22.690544 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:24 crc kubenswrapper[4863]: E1205 06:46:24.166217 4863 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 05 06:46:24 crc kubenswrapper[4863]: E1205 06:46:24.458935 4863 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.187e3ed145f092bd default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 06:46:12.518294205 +0000 UTC m=+0.244291275,LastTimestamp:2025-12-05 06:46:12.518294205 +0000 UTC m=+0.244291275,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 06:46:24 crc kubenswrapper[4863]: I1205 06:46:24.520783 4863 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 05 06:46:25 crc kubenswrapper[4863]: E1205 06:46:25.531595 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 05 06:46:25 crc kubenswrapper[4863]: I1205 06:46:25.643341 4863 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 05 06:46:25 crc kubenswrapper[4863]: I1205 06:46:25.643436 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 
06:46:25 crc kubenswrapper[4863]: I1205 06:46:25.766415 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:25 crc kubenswrapper[4863]: I1205 06:46:25.767710 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:25 crc kubenswrapper[4863]: I1205 06:46:25.767748 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:25 crc kubenswrapper[4863]: I1205 06:46:25.767759 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:25 crc kubenswrapper[4863]: I1205 06:46:25.767784 4863 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.614214 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.614950 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.616259 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.616329 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.616353 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.637620 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.705347 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.706739 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.706811 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:28 crc kubenswrapper[4863]: I1205 06:46:28.706834 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.001403 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.001768 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.003336 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.003429 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.003528 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.008808 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.631099 4863 trace.go:236] Trace[748836605]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 06:46:16.008) (total time: 14622ms): Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[748836605]: ---"Objects listed" error: 14622ms (06:46:30.630) Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[748836605]: [14.622387018s] [14.622387018s] END Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.631146 4863 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.631129 4863 trace.go:236] Trace[1525519244]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 06:46:16.625) (total time: 14005ms): Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[1525519244]: ---"Objects listed" error: 14005ms (06:46:30.631) Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[1525519244]: [14.005482325s] [14.005482325s] END Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.631237 4863 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.632646 4863 trace.go:236] Trace[1198097914]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 06:46:17.176) (total time: 13455ms): Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[1198097914]: ---"Objects listed" error: 13455ms (06:46:30.631) Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[1198097914]: [13.455771419s] [13.455771419s] END Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.632682 4863 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.635064 4863 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.635920 4863 trace.go:236] Trace[597338680]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 06:46:16.330) (total time: 14305ms): Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[597338680]: ---"Objects listed" error: 14305ms (06:46:30.635) Dec 05 06:46:30 crc kubenswrapper[4863]: Trace[597338680]: [14.305748103s] [14.305748103s] END Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.635961 4863 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.692726 4863 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49862->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.693160 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49862->192.168.126.11:17697: read: connection reset by peer" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.692736 4863 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints 
namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:42790->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.693514 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:42790->192.168.126.11:17697: read: connection reset by peer" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.694555 4863 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.694625 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.711711 4863 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.712030 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.725589 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.733326 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:30 crc kubenswrapper[4863]: I1205 06:46:30.735978 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:30 crc kubenswrapper[4863]: E1205 06:46:30.774793 4863 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.522920 4863 apiserver.go:52] "Watching apiserver" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.526956 4863 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.527414 4863 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.527842 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.527972 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.528064 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.528197 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.528235 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.528622 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.528664 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.528377 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.528557 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.530355 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.531601 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.532099 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.532122 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.532293 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.533203 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.533780 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.534035 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.534774 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.571672 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.597282 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.615533 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.625414 4863 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.631095 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641532 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641666 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641689 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641715 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641750 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641776 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641799 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641823 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641853 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641877 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.641901 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.641949 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:46:32.141906026 +0000 UTC m=+19.867903106 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642011 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642064 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642114 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642148 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 
06:46:31.642179 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642215 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642250 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642282 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642281 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642318 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642404 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642414 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642433 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642450 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642509 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642545 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642580 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642614 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642647 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642682 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642697 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642714 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642753 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642787 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642818 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642855 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642891 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642921 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642952 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642987 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643020 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643060 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643095 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643192 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643228 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643267 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643304 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643343 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643381 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643419 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643456 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643531 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643566 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643598 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643633 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643665 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643696 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643730 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643763 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643816 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643848 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643880 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643913 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643948 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643986 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644019 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644111 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644144 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644262 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644295 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644329 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644362 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644395 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644431 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644463 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644523 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644557 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644593 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644626 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644659 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 
06:46:31.644697 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644782 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644814 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644848 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644879 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642667 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642729 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642769 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644961 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.642954 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643005 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643054 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643593 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643680 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643859 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643881 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643926 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643943 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.643999 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644014 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644049 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644179 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644225 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644225 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644265 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644285 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644298 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644388 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644431 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644482 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644645 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644886 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644938 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.644912 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645210 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645240 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645271 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645278 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645289 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645296 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645350 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645355 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645367 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645456 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645521 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645550 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645575 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645601 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645626 4863 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645650 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645674 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645701 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645724 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645757 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645785 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645806 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645818 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645845 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645854 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645886 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645922 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645951 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.645978 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646005 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646035 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646066 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646094 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646124 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646154 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646184 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646213 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646242 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646274 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646296 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646304 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646357 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646385 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646411 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646435 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646457 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646457 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646501 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646500 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646528 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646552 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646579 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646655 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646670 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646682 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646712 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646731 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646753 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646808 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646818 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646844 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.646832 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647020 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647027 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647153 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647202 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647246 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647282 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647318 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647354 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647388 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647421 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647458 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: 
\"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647524 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647558 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647592 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647625 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647661 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647695 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647729 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647763 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647817 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647849 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647881 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647916 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647950 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647983 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648018 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648056 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648089 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648124 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648158 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648191 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648230 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648265 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648300 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648336 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648371 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648410 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648445 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648504 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648546 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648583 4863 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648623 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648658 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648695 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648728 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648761 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648798 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648835 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648869 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648901 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648936 
4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648977 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649011 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649046 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649081 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649116 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649150 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649186 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649226 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649265 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649301 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649338 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649375 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649411 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649444 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649547 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649591 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649640 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649679 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649720 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649758 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649796 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649834 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649878 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649917 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649952 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649995 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650030 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650069 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650157 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650180 4863 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650203 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650223 4863 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650243 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650264 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650286 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650306 4863 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650328 4863 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650346 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650366 4863 reconciler_common.go:293] "Volume detached for 
volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650385 4863 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650404 4863 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650424 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650442 4863 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650463 4863 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.651775 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.651826 4863 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.651884 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.651925 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.651950 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652005 4863 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652068 4863 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652147 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652314 4863 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652335 4863 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652371 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652391 4863 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652413 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652434 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652454 4863 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652497 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652567 4863 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652590 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652611 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652633 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652652 4863 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652744 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652766 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.652874 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.653007 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.653038 4863 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.653057 4863 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.653078 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.653099 4863 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647156 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647161 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647285 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647342 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647357 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647534 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.647632 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648013 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648353 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648371 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648537 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648643 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.648798 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649007 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649014 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649046 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649135 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649217 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649587 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649620 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649717 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649842 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649838 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649977 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.649988 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650248 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.650874 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.651170 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.655040 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.655290 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.655825 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.656041 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.656059 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.656378 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.656594 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.656770 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.657038 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.657283 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.660762 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.661187 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.661651 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.661950 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.662174 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.662215 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.662340 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.662679 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.663293 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.663344 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.663669 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.663740 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.664152 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.664198 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.664238 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.665055 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.665122 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.665291 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.665343 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.665606 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.665742 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.666009 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.665928 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.666665 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.666753 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.667237 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.667442 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.667532 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.667533 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.667660 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.667846 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.667848 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.668288 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.668557 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.668848 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.668882 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.668881 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.669075 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.669817 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.670159 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.670206 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.670429 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.670558 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.671027 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.671330 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.671343 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.671528 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.671571 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.671878 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.672147 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.672257 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.672249 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.672328 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:32.172306083 +0000 UTC m=+19.898303143 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.672348 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.672542 4863 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.672551 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.672740 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.672836 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:32.172812094 +0000 UTC m=+19.898809174 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.674898 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.676302 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.676561 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.677924 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.678423 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.678686 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.678697 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.684767 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.684802 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.684818 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.684888 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:32.184866769 +0000 UTC m=+19.910864029 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.686174 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.686772 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.686846 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.687257 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.687390 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.687486 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.687554 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.687663 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.687893 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.687965 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.688039 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.688055 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.688067 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.688102 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:32.18809121 +0000 UTC m=+19.914088240 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.688033 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.688175 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.688419 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.688936 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.689094 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.689119 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.689125 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.689435 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.689542 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.689527 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690012 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690092 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690019 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690169 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690212 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690328 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690435 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690636 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690692 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.690752 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.691360 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.691553 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.691646 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.694769 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.696527 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.697497 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.697709 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.698098 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.698414 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.698961 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.698985 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.699163 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.699298 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.699868 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.700777 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.701166 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.701360 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.701529 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.701540 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.702566 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.702796 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.702940 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.703162 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.703247 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.703397 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.703452 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.704761 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.706746 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.711232 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.716586 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.717627 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.719830 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68" exitCode=255 Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.720624 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68"} Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.721423 4863 scope.go:117] "RemoveContainer" containerID="b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.724889 4863 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.725021 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: E1205 06:46:31.725287 4863 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.726510 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.739911 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.751305 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.753691 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.753805 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.753901 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.753963 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754017 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754077 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754128 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" 
(UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754186 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754241 4863 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754298 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754363 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754442 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754535 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754598 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754655 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754710 4863 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754770 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754850 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754922 4863 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754993 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755075 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755152 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755232 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755300 4863 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755375 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755452 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755550 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755637 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755719 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755827 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755905 4863 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.755980 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756050 4863 reconciler_common.go:293] "Volume 
detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756132 4863 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756215 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756299 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756380 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756460 4863 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756555 4863 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756672 4863 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756748 4863 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756820 4863 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756895 4863 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.756974 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757045 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757120 4863 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757200 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757270 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757339 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757413 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757507 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757593 4863 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757664 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757742 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757823 4863 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757903 4863 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.757986 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758061 4863 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758161 4863 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758236 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758351 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758489 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758610 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758733 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758848 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.758955 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759093 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759211 4863 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754364 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759347 4863 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759492 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759512 4863 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759526 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759539 4863 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759553 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759566 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759578 4863 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759590 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759602 4863 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759614 4863 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759626 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759639 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759652 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759696 4863 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759710 4863 
reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759823 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759840 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759852 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759866 4863 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759921 4863 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759934 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.759945 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.754328 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.772950 4863 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.772994 4863 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773004 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773015 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: 
\"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773028 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773037 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773045 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773054 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773076 4863 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773085 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773093 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773102 4863 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773111 4863 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773120 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773129 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773138 4863 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773147 4863 
reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773158 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773166 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773175 4863 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773183 4863 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773192 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773199 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773210 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773219 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773227 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773236 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773243 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773252 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773260 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773267 4863 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773276 4863 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773284 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773292 4863 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773301 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773309 4863 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773317 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773325 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773333 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773341 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773350 4863 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773357 4863 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773366 4863 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773374 4863 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773381 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773390 4863 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773398 4863 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773407 4863 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773414 4863 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773423 4863 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773433 4863 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773443 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773452 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773461 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773485 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773494 4863 reconciler_common.go:293] "Volume detached for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773502 4863 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773511 4863 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773520 4863 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.773527 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.785751 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.797388 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.811344 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.821444 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.831894 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.841311 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.849362 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 06:46:31 crc kubenswrapper[4863]: W1205 06:46:31.862260 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-8a98ef64ea5657d0c1ab8580cc99720b193503923570a511a20ee6674e56cd24 WatchSource:0}: Error finding container 8a98ef64ea5657d0c1ab8580cc99720b193503923570a511a20ee6674e56cd24: Status 404 returned error can't find the container with id 8a98ef64ea5657d0c1ab8580cc99720b193503923570a511a20ee6674e56cd24 Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.863309 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 06:46:31 crc kubenswrapper[4863]: I1205 06:46:31.874683 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 06:46:31 crc kubenswrapper[4863]: W1205 06:46:31.875304 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-46215e90240c5b7052a105aec47b3ec368f15ae8e2f316ddebc7e14282339fc2 WatchSource:0}: Error finding container 46215e90240c5b7052a105aec47b3ec368f15ae8e2f316ddebc7e14282339fc2: Status 404 returned error can't find the container with id 46215e90240c5b7052a105aec47b3ec368f15ae8e2f316ddebc7e14282339fc2 Dec 05 06:46:31 crc kubenswrapper[4863]: W1205 06:46:31.893102 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-f45e30fa31d62d91f3a3a5da37b0d4c2105317a0d7765389cdc151856795740d WatchSource:0}: Error finding container f45e30fa31d62d91f3a3a5da37b0d4c2105317a0d7765389cdc151856795740d: Status 404 returned error can't find the container with id f45e30fa31d62d91f3a3a5da37b0d4c2105317a0d7765389cdc151856795740d Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.176284 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.176414 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.176580 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.176681 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.176719 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.176798 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:33.176768051 +0000 UTC m=+20.902765131 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.176829 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:33.176815402 +0000 UTC m=+20.902812482 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.176956 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:46:33.176942664 +0000 UTC m=+20.902939734 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.277611 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.277679 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.277854 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.277880 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.277899 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.277965 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:33.277944412 +0000 UTC m=+21.003941482 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.278047 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.278065 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.278079 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:32 crc kubenswrapper[4863]: E1205 06:46:32.278114 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:33.278102496 +0000 UTC m=+21.004099566 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.607691 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.609450 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.611854 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.613205 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.615259 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.616408 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.617726 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.619707 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.621129 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.623119 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.624236 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.626650 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.627746 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.628952 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.630648 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@
sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" 
not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.631159 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.632377 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.634578 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.635445 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 
06:46:32.636785 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.638922 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.639983 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.642134 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.643149 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.645156 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.645885 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.646794 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.648314 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.649087 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.650402 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.651112 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.652360 4863 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.652706 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 
06:46:32.654163 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.655154 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.656493 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.657251 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.662883 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.664499 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.665249 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.671206 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.672086 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 06:46:32 crc 
kubenswrapper[4863]: I1205 06:46:32.673154 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.673901 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.675180 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.675982 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.677155 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.678798 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.679191 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.680609 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.682611 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.683677 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.684281 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.685312 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.686043 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.686734 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.687786 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.704113 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.724506 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe"} Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.724574 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"8a98ef64ea5657d0c1ab8580cc99720b193503923570a511a20ee6674e56cd24"} Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.726437 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.727423 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.729744 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc"} Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.730253 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.737202 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e"} Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.737388 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f"} Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.737569 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f45e30fa31d62d91f3a3a5da37b0d4c2105317a0d7765389cdc151856795740d"} Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.740278 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"46215e90240c5b7052a105aec47b3ec368f15ae8e2f316ddebc7e14282339fc2"} Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.747083 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.768057 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.786125 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.801831 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.821768 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.844811 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.861809 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.879557 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.891777 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.906385 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:32 crc kubenswrapper[4863]: I1205 06:46:32.920060 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.926916 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.927017 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.927062 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.927111 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.927252 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.927368 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.927445 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:35.927421945 +0000 UTC m=+23.653419025 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.928738 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.927682 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930147 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930177 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.929944 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930203 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.927967 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.928269 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930520 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930572 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930621 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930013 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:46:35.929990801 +0000 UTC m=+23.655987881 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930687 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:35.930663686 +0000 UTC m=+23.656660756 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930710 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:35.930698857 +0000 UTC m=+23.656695937 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.930732 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:35.930721228 +0000 UTC m=+23.656718308 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.932092 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:33 crc kubenswrapper[4863]: E1205 06:46:33.932237 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.975195 4863 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.977714 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.977862 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.977887 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.977985 4863 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.995806 4863 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.996144 4863 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.997961 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.998014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.998034 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.998060 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:33 crc kubenswrapper[4863]: I1205 06:46:33.998085 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:33Z","lastTransitionTime":"2025-12-05T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: E1205 06:46:34.028524 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.040109 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.040160 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.040173 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.040192 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.040204 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: E1205 06:46:34.059777 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.063298 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.063327 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.063335 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.063348 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.063358 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: E1205 06:46:34.081626 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.086018 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.086070 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.086082 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.086102 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.086116 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: E1205 06:46:34.111652 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.115226 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.115283 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.115300 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.115320 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.115338 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: E1205 06:46:34.128341 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:34 crc kubenswrapper[4863]: E1205 06:46:34.128593 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.130013 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.130059 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.130072 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.130089 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.130101 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.232383 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.232445 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.232463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.232520 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.232539 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.334742 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.334777 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.334789 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.334804 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.334816 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.436816 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.436851 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.436859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.436873 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.436882 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.539173 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.539237 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.539255 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.539279 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.539297 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.642176 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.642254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.642279 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.642331 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.642351 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.745985 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.746057 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.746077 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.746106 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.746125 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.848958 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.849029 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.849049 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.849076 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.849095 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.952054 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.952127 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.952146 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.952176 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:34 crc kubenswrapper[4863]: I1205 06:46:34.952195 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:34Z","lastTransitionTime":"2025-12-05T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.054644 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.054698 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.054715 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.054735 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.054752 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.158217 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.158282 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.158300 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.158327 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.158345 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.261722 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.261791 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.261809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.261833 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.261851 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.364871 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.364939 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.364957 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.364984 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.365003 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.468382 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.468450 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.468466 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.468527 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.468545 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.571679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.571753 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.571770 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.571794 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.571813 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.601001 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.601076 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.601027 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.601213 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.601612 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.601737 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.674578 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.674631 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.674648 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.674671 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.674686 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.777766 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.777835 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.777858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.777887 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.777910 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.879938 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.879978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.879988 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.880004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.880017 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.943929 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.944005 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.944037 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.944090 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.944119 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944164 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:46:39.944123922 +0000 UTC m=+27.670121002 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944207 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944273 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 06:46:39.944252565 +0000 UTC m=+27.670249725 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944278 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944357 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944381 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944393 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:39.944365817 +0000 UTC m=+27.670362907 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944294 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944455 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944494 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944404 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944536 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:39.944524241 +0000 UTC m=+27.670521391 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:35 crc kubenswrapper[4863]: E1205 06:46:35.944569 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:39.944550001 +0000 UTC m=+27.670547131 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.982834 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.982873 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.982884 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.982898 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:35 crc kubenswrapper[4863]: I1205 06:46:35.982910 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:35Z","lastTransitionTime":"2025-12-05T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.085129 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.085161 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.085170 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.085182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.085193 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.187218 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.187260 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.187271 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.187288 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.187298 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.289204 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.289266 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.289277 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.289303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.289316 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.392402 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.392441 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.392449 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.392463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.392491 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.495578 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.495635 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.495652 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.495676 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.495693 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.598202 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.598239 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.598247 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.598263 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.598273 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.613430 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-9ctrk"] Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.614911 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.619900 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.621857 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.621913 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.640299 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\
\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is 
complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.649880 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd42p\" (UniqueName: \"kubernetes.io/projected/7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a-kube-api-access-vd42p\") pod \"node-resolver-9ctrk\" (UID: \"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\") " pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.649932 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a-hosts-file\") pod \"node-resolver-9ctrk\" (UID: \"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\") " 
pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.651518 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.663311 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.674340 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.684016 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.697166 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.700686 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.700716 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.700736 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.700750 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.700760 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.713748 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.726078 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.734779 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.751190 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a-hosts-file\") pod \"node-resolver-9ctrk\" (UID: \"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\") " pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.751248 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd42p\" (UniqueName: \"kubernetes.io/projected/7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a-kube-api-access-vd42p\") pod \"node-resolver-9ctrk\" (UID: \"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\") " pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.751365 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a-hosts-file\") pod \"node-resolver-9ctrk\" (UID: \"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\") " pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.772808 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd42p\" (UniqueName: \"kubernetes.io/projected/7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a-kube-api-access-vd42p\") pod \"node-resolver-9ctrk\" (UID: \"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\") " pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.802692 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.802725 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.802733 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.802745 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.802753 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.904404 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.904449 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.904457 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.904484 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.904494 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:36Z","lastTransitionTime":"2025-12-05T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.926513 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-9ctrk" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.937261 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e"} Dec 05 06:46:36 crc kubenswrapper[4863]: W1205 06:46:36.938512 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c8fa48f_3360_4c3a_b4e8_958a84ee6a9a.slice/crio-2734399fa5bf120caa9dbbd8e029542737cb1939892013935b24dff6ee2f2220 WatchSource:0}: Error finding container 2734399fa5bf120caa9dbbd8e029542737cb1939892013935b24dff6ee2f2220: Status 404 returned error can't find the container with id 2734399fa5bf120caa9dbbd8e029542737cb1939892013935b24dff6ee2f2220 Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.957088 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.968550 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:36 crc kubenswrapper[4863]: I1205 06:46:36.979691 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:36Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.004365 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.007941 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.007989 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.007996 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.008009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.008019 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.015574 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-vw8fd"] Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.015827 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.017111 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.019325 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.019502 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.019589 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.019630 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-whgzt"] Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.020133 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.020179 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-mnkj9"] Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.020596 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.020647 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.021235 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.021443 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.021743 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.022107 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.022200 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.022371 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.024491 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.024712 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.053969 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-cni-binary-copy\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054004 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-cni-bin\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054021 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-daemon-config\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054041 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sccxh\" (UniqueName: \"kubernetes.io/projected/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-kube-api-access-sccxh\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054057 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-os-release\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054072 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-etc-kubernetes\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054084 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-netns\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054097 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-multus-certs\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054110 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-cni-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.054129 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b66zm\" (UniqueName: \"kubernetes.io/projected/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-kube-api-access-b66zm\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057134 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057232 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057315 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-kubelet\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057348 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-system-cni-dir\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057376 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b6dd012a-040f-4504-9866-21443f9165d4-rootfs\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057396 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057459 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-conf-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057502 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-hostroot\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057524 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cnibin\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057545 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-system-cni-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057563 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b6dd012a-040f-4504-9866-21443f9165d4-proxy-tls\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057584 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-os-release\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057606 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b6dd012a-040f-4504-9866-21443f9165d4-mcd-auth-proxy-config\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057630 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58b62\" (UniqueName: \"kubernetes.io/projected/b6dd012a-040f-4504-9866-21443f9165d4-kube-api-access-58b62\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057707 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-cni-multus\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057743 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cni-binary-copy\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057767 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-socket-dir-parent\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057788 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-k8s-cni-cncf-io\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.057809 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-cnibin\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.080531 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.103781 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.111207 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.111228 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.111235 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.111246 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.111256 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.121162 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: 
I1205 06:46:37.143487 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.152613 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158405 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-kubelet\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158433 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-system-cni-dir\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158453 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b6dd012a-040f-4504-9866-21443f9165d4-rootfs\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158484 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158502 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-conf-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158520 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: 
\"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-hostroot\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158526 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b6dd012a-040f-4504-9866-21443f9165d4-rootfs\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158536 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cnibin\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158579 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cnibin\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158578 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-system-cni-dir\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158589 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-hostroot\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158623 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-system-cni-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158589 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-conf-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158650 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b6dd012a-040f-4504-9866-21443f9165d4-proxy-tls\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158666 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-os-release\") pod \"multus-additional-cni-plugins-whgzt\" (UID: 
\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158688 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b6dd012a-040f-4504-9866-21443f9165d4-mcd-auth-proxy-config\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158707 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58b62\" (UniqueName: \"kubernetes.io/projected/b6dd012a-040f-4504-9866-21443f9165d4-kube-api-access-58b62\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158716 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-system-cni-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158493 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-kubelet\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158738 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-socket-dir-parent\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158754 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-k8s-cni-cncf-io\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158768 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-cni-multus\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158783 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cni-binary-copy\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158799 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-cnibin\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " 
pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158817 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-cni-bin\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158831 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-daemon-config\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158854 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-cni-binary-copy\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158870 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-os-release\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158883 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-etc-kubernetes\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158900 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sccxh\" (UniqueName: \"kubernetes.io/projected/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-kube-api-access-sccxh\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.158916 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-netns\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159003 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-os-release\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159031 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-cni-bin\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159062 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-socket-dir-parent\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159084 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-k8s-cni-cncf-io\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159104 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-var-lib-cni-multus\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159140 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-os-release\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159149 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159230 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-multus-certs\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159251 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-cni-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159272 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b66zm\" (UniqueName: \"kubernetes.io/projected/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-kube-api-access-b66zm\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159295 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159297 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b6dd012a-040f-4504-9866-21443f9165d4-mcd-auth-proxy-config\") 
pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159341 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-etc-kubernetes\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159343 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-multus-certs\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159366 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-host-run-netns\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159396 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-cnibin\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159411 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-cni-dir\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159577 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159664 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-cni-binary-copy\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159714 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-multus-daemon-config\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.159799 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-cni-binary-copy\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.162557 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b6dd012a-040f-4504-9866-21443f9165d4-proxy-tls\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.163196 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.173444 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b66zm\" (UniqueName: \"kubernetes.io/projected/b9e2cdef-4a53-4f32-b973-e5d6ba0708db-kube-api-access-b66zm\") pod \"multus-vw8fd\" (UID: \"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\") " pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.176085 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.178435 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sccxh\" (UniqueName: \"kubernetes.io/projected/2c97ee70-4a26-46eb-9b38-0c53ff2189a1-kube-api-access-sccxh\") pod \"multus-additional-cni-plugins-whgzt\" (UID: \"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\") " pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.180368 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58b62\" (UniqueName: \"kubernetes.io/projected/b6dd012a-040f-4504-9866-21443f9165d4-kube-api-access-58b62\") pod \"machine-config-daemon-mnkj9\" (UID: \"b6dd012a-040f-4504-9866-21443f9165d4\") " pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.186304 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.195946 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.207313 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.213646 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.213683 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.213692 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.213708 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.213717 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.217853 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.228382 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.239817 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.249531 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.265390 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.316432 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.316462 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.316490 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.316504 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.316514 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.326729 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vw8fd" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.332685 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-whgzt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.342026 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:46:37 crc kubenswrapper[4863]: W1205 06:46:37.348270 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9e2cdef_4a53_4f32_b973_e5d6ba0708db.slice/crio-97eef2f5935862d4ed0016420c92deca2c3e151b66ed7d09f29680cff0103c6e WatchSource:0}: Error finding container 97eef2f5935862d4ed0016420c92deca2c3e151b66ed7d09f29680cff0103c6e: Status 404 returned error can't find the container with id 97eef2f5935862d4ed0016420c92deca2c3e151b66ed7d09f29680cff0103c6e Dec 05 06:46:37 crc kubenswrapper[4863]: W1205 06:46:37.356728 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c97ee70_4a26_46eb_9b38_0c53ff2189a1.slice/crio-460fbeed8375a67cef67a559b0d612a2c20765a1f76f434699b293679870135b WatchSource:0}: Error finding container 460fbeed8375a67cef67a559b0d612a2c20765a1f76f434699b293679870135b: Status 404 returned error can't find the container with id 460fbeed8375a67cef67a559b0d612a2c20765a1f76f434699b293679870135b Dec 05 06:46:37 crc kubenswrapper[4863]: W1205 06:46:37.359700 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6dd012a_040f_4504_9866_21443f9165d4.slice/crio-502243bd6bc21a910ee87a346415ac5da3b5336053824710b51e55be5be2695b WatchSource:0}: Error finding container 502243bd6bc21a910ee87a346415ac5da3b5336053824710b51e55be5be2695b: Status 404 returned error can't find the container with id 502243bd6bc21a910ee87a346415ac5da3b5336053824710b51e55be5be2695b Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.419544 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.419584 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.419596 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.419613 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.419627 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.426111 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xjcxh"] Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.427007 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.429826 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.430169 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.430819 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.430915 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.430982 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.431375 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.431383 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.447892 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.461932 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-bin\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.461963 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-kubelet\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462011 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462063 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-systemd\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462100 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-env-overrides\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462116 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-netns\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462130 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-config\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462317 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q27nh\" (UniqueName: \"kubernetes.io/projected/4e873158-22c6-4eab-9cb1-438b0f50f46d-kube-api-access-q27nh\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462431 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-slash\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462550 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-ovn-kubernetes\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462589 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-ovn\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462767 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-netd\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462885 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462951 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-systemd-units\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.462994 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-etc-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.463031 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-log-socket\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.463110 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-node-log\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.463145 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-script-lib\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.463189 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-var-lib-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.463242 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovn-node-metrics-cert\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.467692 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.491969 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.505147 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.522067 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.526810 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.526876 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.526899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.526926 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.526945 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.542677 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.558091 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564650 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-systemd-units\") pod \"ovnkube-node-xjcxh\" (UID: 
\"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564702 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-etc-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564737 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-log-socket\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564783 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-node-log\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564795 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-systemd-units\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564815 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-script-lib\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564854 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-log-socket\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564852 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-var-lib-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564889 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovn-node-metrics-cert\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564904 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-var-lib-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 
crc kubenswrapper[4863]: I1205 06:46:37.564927 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-bin\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564950 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-kubelet\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564964 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-node-log\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564972 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.564997 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565028 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-etc-openvswitch\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565031 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-systemd\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565080 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-env-overrides\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565113 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-netns\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565145 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-config\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565176 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q27nh\" (UniqueName: \"kubernetes.io/projected/4e873158-22c6-4eab-9cb1-438b0f50f46d-kube-api-access-q27nh\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565209 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-slash\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565241 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-ovn-kubernetes\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565273 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-ovn\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565301 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-netd\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565335 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565415 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565536 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-bin\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565645 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-kubelet\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565912 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-ovn-kubernetes\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.565972 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-netns\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.566103 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-ovn\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.566192 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-netd\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.566225 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-systemd\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.566365 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-slash\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.566492 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-env-overrides\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.566566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-script-lib\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.567596 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-config\") pod \"ovnkube-node-xjcxh\" (UID: 
\"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.572760 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.585575 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovn-node-metrics-cert\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.588684 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q27nh\" (UniqueName: \"kubernetes.io/projected/4e873158-22c6-4eab-9cb1-438b0f50f46d-kube-api-access-q27nh\") pod \"ovnkube-node-xjcxh\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.590318 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.601217 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.601255 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:37 crc kubenswrapper[4863]: E1205 06:46:37.601347 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.601401 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:37 crc kubenswrapper[4863]: E1205 06:46:37.601447 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:37 crc kubenswrapper[4863]: E1205 06:46:37.601605 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.605047 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.624748 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.635993 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.636037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.636049 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.636073 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.636086 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.645411 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.657987 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.739081 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.739146 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.739163 4863 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.739189 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.739206 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.739746 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:37 crc kubenswrapper[4863]: W1205 06:46:37.750274 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e873158_22c6_4eab_9cb1_438b0f50f46d.slice/crio-302d21980ff0ae7b7085fe86876f26cfc77388b00034166305f31ba33679beda WatchSource:0}: Error finding container 302d21980ff0ae7b7085fe86876f26cfc77388b00034166305f31ba33679beda: Status 404 returned error can't find the container with id 302d21980ff0ae7b7085fe86876f26cfc77388b00034166305f31ba33679beda Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.842941 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.843372 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.843385 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.843401 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.843414 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.942692 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerStarted","Data":"29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.942756 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerStarted","Data":"97eef2f5935862d4ed0016420c92deca2c3e151b66ed7d09f29680cff0103c6e"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.945296 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.945323 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.945331 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.945343 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.945352 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:37Z","lastTransitionTime":"2025-12-05T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.946106 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-9ctrk" event={"ID":"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a","Type":"ContainerStarted","Data":"271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.946127 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-9ctrk" event={"ID":"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a","Type":"ContainerStarted","Data":"2734399fa5bf120caa9dbbd8e029542737cb1939892013935b24dff6ee2f2220"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.951087 4863 generic.go:334] "Generic (PLEG): container finished" podID="2c97ee70-4a26-46eb-9b38-0c53ff2189a1" containerID="a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca" exitCode=0 Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.951197 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerDied","Data":"a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.951251 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerStarted","Data":"460fbeed8375a67cef67a559b0d612a2c20765a1f76f434699b293679870135b"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.952683 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"302d21980ff0ae7b7085fe86876f26cfc77388b00034166305f31ba33679beda"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.956037 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.956084 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.956105 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"502243bd6bc21a910ee87a346415ac5da3b5336053824710b51e55be5be2695b"} Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.979103 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:37 crc kubenswrapper[4863]: I1205 06:46:37.991394 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.001658 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.016089 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.026642 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.040513 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.047587 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.047609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.047617 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.047629 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.047639 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.052850 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.064223 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.081257 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.094330 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.106921 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.121508 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.133086 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.142780 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.150857 4863 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.150894 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.150910 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.150931 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.150948 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.152826 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.166543 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.176821 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.194853 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.209341 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.225865 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc 
kubenswrapper[4863]: I1205 06:46:38.240193 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.253879 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.253919 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.253931 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.253948 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.253960 4863 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.255981 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.271284 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.284516 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.295812 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.317593 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.356799 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.356845 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.356862 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.356886 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.356903 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.459730 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.459763 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.459771 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.459784 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.459794 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.562659 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.562725 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.562745 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.562773 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.562792 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.666363 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.666402 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.666412 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.666429 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.666442 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.769163 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.769232 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.769261 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.769287 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.769304 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.872655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.872705 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.872718 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.872736 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.872751 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.928656 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-tn7hx"] Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.929171 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.931314 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.931353 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.933120 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.933203 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.950102 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.966110 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22" exitCode=0 Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.966173 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.970317 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.973356 4863 generic.go:334] "Generic (PLEG): container finished" podID="2c97ee70-4a26-46eb-9b38-0c53ff2189a1" containerID="520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498" exitCode=0 Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.973613 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerDied","Data":"520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.976140 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.976228 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.976254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.976330 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.976423 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:38Z","lastTransitionTime":"2025-12-05T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.979033 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-serviceca\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.979128 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-host\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.979193 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9ct2\" (UniqueName: \"kubernetes.io/projected/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-kube-api-access-b9ct2\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:38 crc kubenswrapper[4863]: I1205 06:46:38.993429 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.015778 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.036184 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.059437 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.070640 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.080061 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-host\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.080148 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9ct2\" (UniqueName: \"kubernetes.io/projected/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-kube-api-access-b9ct2\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.080201 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-serviceca\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.080273 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-host\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.081499 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-serviceca\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.081888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.081937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.081949 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.081969 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.081982 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.086490 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.114123 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.116891 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9ct2\" (UniqueName: \"kubernetes.io/projected/4aafc06e-b11d-4ae3-af3c-f3597b19bc3a-kube-api-access-b9ct2\") pod \"node-ca-tn7hx\" (UID: \"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\") " pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.155985 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.183877 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.183913 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.183924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.183937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.183947 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.186901 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc 
kubenswrapper[4863]: I1205 06:46:39.198889 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.210911 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.231796 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.243425 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.248258 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-tn7hx" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.253286 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.266100 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: W1205 06:46:39.267344 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aafc06e_b11d_4ae3_af3c_f3597b19bc3a.slice/crio-369585e9e0fa00fb1f3518d3660fc3fd9250988ff46bae7b73b7602f45dcd60e WatchSource:0}: Error finding container 369585e9e0fa00fb1f3518d3660fc3fd9250988ff46bae7b73b7602f45dcd60e: Status 404 returned error can't find the container with id 369585e9e0fa00fb1f3518d3660fc3fd9250988ff46bae7b73b7602f45dcd60e Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.280265 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.286218 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.286262 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.286289 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.286307 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.286318 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.295810 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.312313 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.324355 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.335861 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.370910 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.385223 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.388828 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.388859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.388870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.388888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.388899 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.397346 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.410897 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.425229 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.438118 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.490910 4863 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.491303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.491313 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.491334 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.491346 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.594256 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.594318 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.594332 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.594354 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.594373 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.601292 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.601419 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.601645 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.601670 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.601811 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.601942 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.697393 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.697496 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.697516 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.697541 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.697559 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.801386 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.801435 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.801451 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.801505 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.801526 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.906069 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.906131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.906150 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.906175 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.906192 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:39Z","lastTransitionTime":"2025-12-05T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.982381 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.982443 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.982464 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.982510 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.982528 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.982545 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.986753 4863 generic.go:334] "Generic (PLEG): container finished" podID="2c97ee70-4a26-46eb-9b38-0c53ff2189a1" containerID="93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461" exitCode=0 Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.986826 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerDied","Data":"93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.988980 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-tn7hx" event={"ID":"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a","Type":"ContainerStarted","Data":"da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.989012 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-tn7hx" event={"ID":"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a","Type":"ContainerStarted","Data":"369585e9e0fa00fb1f3518d3660fc3fd9250988ff46bae7b73b7602f45dcd60e"} Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.990204 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990331 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:46:47.990306014 +0000 UTC m=+35.716303094 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.990404 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.990510 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.990584 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990641 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990667 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990685 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990736 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:47.990720663 +0000 UTC m=+35.716717733 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:39 crc kubenswrapper[4863]: I1205 06:46:39.990637 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990761 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990823 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990847 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:47.990834656 +0000 UTC m=+35.716831726 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990861 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990863 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990886 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990949 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:47.990924658 +0000 UTC m=+35.716921738 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:39 crc kubenswrapper[4863]: E1205 06:46:39.990976 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:47.990963148 +0000 UTC m=+35.716960228 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.008779 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.008840 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.008859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.008885 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.008903 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.020596 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.039440 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.052909 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.071200 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.087193 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.102602 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.111888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.111939 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.111956 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.111978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.112000 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.126501 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.145274 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.169176 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.188462 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.206501 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.214020 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.214057 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.214066 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.214080 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.214092 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.224135 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.243749 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.267491 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.290765 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.308099 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.317297 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.317342 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.317359 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.317386 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.317405 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.328409 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.351423 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.369959 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.384106 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.413296 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.420031 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.420082 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.420100 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.420129 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.420151 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.432395 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.452039 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.465916 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.484225 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.499852 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.519954 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.523441 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.523531 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.523549 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.523579 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.523597 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.537406 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:40Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.625924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.625997 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.626014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.626039 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.626081 4863 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.729234 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.729306 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.729324 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.729351 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.729368 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.832325 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.832410 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.832428 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.832454 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.832504 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.935845 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.935910 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.935935 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.935968 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.935991 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:40Z","lastTransitionTime":"2025-12-05T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.998043 4863 generic.go:334] "Generic (PLEG): container finished" podID="2c97ee70-4a26-46eb-9b38-0c53ff2189a1" containerID="d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60" exitCode=0 Dec 05 06:46:40 crc kubenswrapper[4863]: I1205 06:46:40.998104 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerDied","Data":"d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.017918 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.039517 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.039576 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.039594 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.039622 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.039640 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.042523 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.062955 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.079994 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.098322 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.113549 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.125762 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.141964 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.142039 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.142109 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.142144 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.142166 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.150041 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.171991 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.192210 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.208398 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.227247 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.241399 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.244883 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.244912 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.244924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.244941 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.244952 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.259942 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:41Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.348866 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.349172 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.349354 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.349456 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.349594 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.451793 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.451822 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.451830 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.451845 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.451854 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.555694 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.555757 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.555777 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.555802 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.555820 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.601509 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.601585 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.601953 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:41 crc kubenswrapper[4863]: E1205 06:46:41.602149 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:41 crc kubenswrapper[4863]: E1205 06:46:41.602499 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:41 crc kubenswrapper[4863]: E1205 06:46:41.602745 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.659149 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.659208 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.659225 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.659249 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.659267 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.763000 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.763079 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.763105 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.763136 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.763160 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.866448 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.866519 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.866531 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.866550 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.866565 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.969955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.970003 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.970020 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.970043 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:41 crc kubenswrapper[4863]: I1205 06:46:41.970061 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:41Z","lastTransitionTime":"2025-12-05T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.006967 4863 generic.go:334] "Generic (PLEG): container finished" podID="2c97ee70-4a26-46eb-9b38-0c53ff2189a1" containerID="d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28" exitCode=0 Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.007107 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerDied","Data":"d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.011999 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.033817 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.053077 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.073615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.073678 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.073698 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.073723 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.073740 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.086982 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.107311 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.121861 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.135803 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.148372 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.169997 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.176133 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.176175 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.176187 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.176202 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.176212 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.189243 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.203647 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.227085 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.245755 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.262759 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.279072 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.279133 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.279149 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.279173 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.279191 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.283090 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.381511 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.381598 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.381623 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.381655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.381678 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.484955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.485014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.485032 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.485056 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.485074 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.588370 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.588411 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.588420 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.588434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.588445 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.612534 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.626807 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.636510 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.647771 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.656728 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.667461 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.679773 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.690772 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.690816 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.690828 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.690843 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.690854 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.697086 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.714828 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.736932 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.752436 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.771300 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.792853 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.792912 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.792941 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.792966 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.792983 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.806776 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.826834 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.895911 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.895966 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.895984 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.896006 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:42 crc kubenswrapper[4863]: I1205 06:46:42.896024 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:42Z","lastTransitionTime":"2025-12-05T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.008946 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.009037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.009057 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.009124 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.009142 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.020451 4863 generic.go:334] "Generic (PLEG): container finished" podID="2c97ee70-4a26-46eb-9b38-0c53ff2189a1" containerID="591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7" exitCode=0 Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.020555 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerDied","Data":"591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.058504 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered 
and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.077459 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.092902 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.111646 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.111702 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.111734 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.111757 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.111773 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.113391 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.133298 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.146801 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.166619 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.178657 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.192217 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.203623 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.215349 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.215376 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.215384 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.215397 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.215364 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.215407 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.227036 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.239846 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.250390 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.318793 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.318861 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.318871 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.318888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.318898 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.421077 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.421164 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.421184 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.421212 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.421229 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.524045 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.524085 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.524094 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.524108 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.524118 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.601922 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.601973 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.602010 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:43 crc kubenswrapper[4863]: E1205 06:46:43.602120 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:43 crc kubenswrapper[4863]: E1205 06:46:43.602252 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:43 crc kubenswrapper[4863]: E1205 06:46:43.602421 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.627510 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.627553 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.627571 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.627606 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.627626 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.731251 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.731326 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.731347 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.731373 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.731391 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.835120 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.835219 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.835242 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.835301 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.835323 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.939801 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.939858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.939870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.939886 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:43 crc kubenswrapper[4863]: I1205 06:46:43.939898 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:43Z","lastTransitionTime":"2025-12-05T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.042171 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.042238 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.042255 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.042275 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.042291 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.145343 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.145402 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.145419 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.145442 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.145460 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.248784 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.248837 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.248854 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.248879 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.248896 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.327636 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.327666 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.327675 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.327690 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.327699 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: E1205 06:46:44.342514 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.346247 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.346279 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.346288 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.346302 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.346313 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: E1205 06:46:44.367701 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.372228 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.372270 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.372285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.372303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.372316 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: E1205 06:46:44.388943 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.392605 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.392644 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.392655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.392672 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.392684 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: E1205 06:46:44.408002 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.411652 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.411703 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.411732 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.411755 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.411773 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: E1205 06:46:44.424929 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:44 crc kubenswrapper[4863]: E1205 06:46:44.425233 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.427048 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.427086 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.427099 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.427119 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.427131 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.529727 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.529778 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.529790 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.529809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.529822 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.632931 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.632990 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.633007 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.633029 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.633046 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.736014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.736061 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.736079 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.736103 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.736120 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.838444 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.838532 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.838549 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.838572 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.838588 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.941433 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.941536 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.941567 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.941597 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:44 crc kubenswrapper[4863]: I1205 06:46:44.941618 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:44Z","lastTransitionTime":"2025-12-05T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.035309 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.035746 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.035816 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.043906 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.043963 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.043981 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.044003 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.044019 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.044054 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" event={"ID":"2c97ee70-4a26-46eb-9b38-0c53ff2189a1","Type":"ContainerStarted","Data":"d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.057392 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.076206 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.077308 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.079051 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.101205 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b36
9a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" 
not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.121736 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.142619 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.147330 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.147408 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.147434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.147466 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.147517 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.166146 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.182265 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.198551 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.227532 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.245693 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.251213 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.251269 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.251278 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.251299 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.251311 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.261872 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.283803 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.308008 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.329296 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.350983 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.354897 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.355088 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.355250 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.355399 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.355575 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.370075 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.394035 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.414652 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.432909 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.454259 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.459137 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.459342 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.459508 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.459679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.459837 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.471152 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.484706 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.512611 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.526874 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.545630 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.563044 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.563113 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.563132 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.563156 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.563247 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.569324 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.589653 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.601333 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.601462 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:45 crc kubenswrapper[4863]: E1205 06:46:45.601657 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:45 crc kubenswrapper[4863]: E1205 06:46:45.601849 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.601951 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:45 crc kubenswrapper[4863]: E1205 06:46:45.602230 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.610072 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.666912 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.667017 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.667037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.667061 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.667079 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.770168 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.770241 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.770258 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.770285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.770302 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.872638 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.872678 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.872689 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.872705 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.872717 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.975796 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.975844 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.975856 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.975874 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:45 crc kubenswrapper[4863]: I1205 06:46:45.975887 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:45Z","lastTransitionTime":"2025-12-05T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.047736 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.078941 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.079002 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.079021 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.079049 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.079070 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.182340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.182391 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.182411 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.182435 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.182453 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.285318 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.285393 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.285416 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.285447 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.285504 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.388542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.388615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.388632 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.388658 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.388675 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.492339 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.492400 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.492425 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.492455 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.492505 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.595550 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.595609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.595626 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.595655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.595672 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.700144 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.700229 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.700251 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.700281 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.700303 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.802784 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.802875 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.802901 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.802934 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.802958 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.905972 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.906029 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.906047 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.906070 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:46 crc kubenswrapper[4863]: I1205 06:46:46.906087 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:46Z","lastTransitionTime":"2025-12-05T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.009428 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.009560 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.009585 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.009619 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.009641 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.051200 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.112070 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.112114 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.112132 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.112154 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.112171 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.215315 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.215364 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.215380 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.215402 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.215419 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.318329 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.318360 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.318372 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.318390 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.318404 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.421411 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.421557 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.421583 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.421614 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.421638 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.524653 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.524701 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.524716 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.524735 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.524748 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.601798 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.601904 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.601905 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:47 crc kubenswrapper[4863]: E1205 06:46:47.602014 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:47 crc kubenswrapper[4863]: E1205 06:46:47.602229 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:47 crc kubenswrapper[4863]: E1205 06:46:47.602390 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.627692 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.627756 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.627774 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.627796 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.627814 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.731671 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.731745 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.731772 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.731807 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.731829 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.834377 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.834440 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.834457 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.834521 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.834548 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.936998 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.937047 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.937059 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.937076 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:47 crc kubenswrapper[4863]: I1205 06:46:47.937090 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:47Z","lastTransitionTime":"2025-12-05T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.040445 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.040532 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.040553 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.040578 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.040595 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.057448 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/0.log" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.061692 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643" exitCode=1 Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.061761 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.063142 4863 scope.go:117] "RemoveContainer" containerID="2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.083011 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.089995 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.090146 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.090216 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090239 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:47:04.090204716 +0000 UTC m=+51.816201796 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.090282 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.090339 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090352 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090395 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090394 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090417 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090466 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:04.090446122 +0000 UTC m=+51.816443202 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090538 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:04.090509273 +0000 UTC m=+51.816506353 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090617 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090644 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090652 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090679 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090688 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:04.090674787 +0000 UTC m=+51.816671867 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:46:48 crc kubenswrapper[4863]: E1205 06:46:48.090759 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:04.090731328 +0000 UTC m=+51.816728458 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.100410 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.121609 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.143698 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.143745 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.143762 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.143785 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.143802 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.148054 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:
46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.174569 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.198243 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.235416 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec6
09d841c951d681c7216f1643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:47Z\\\",\\\"message\\\":\\\" 6188 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:46:46.474874 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 06:46:46.474919 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 06:46:46.474977 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 06:46:46.475011 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 06:46:46.474979 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 06:46:46.475110 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 06:46:46.475117 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 06:46:46.475069 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:46:46.475164 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:46:46.475169 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 06:46:46.475214 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:46:46.475244 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:46:46.475230 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 06:46:46.475295 6188 factory.go:656] Stopping watch factory\\\\nI1205 06:46:46.475331 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
06:46:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.247899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.247955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.247973 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.247997 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.248017 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.261061 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.277906 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.297536 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.312090 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.330195 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.346453 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.346467 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.351310 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.351347 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.351362 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.351383 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.351399 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.366927 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.453856 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.453924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.453944 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.453969 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.453987 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.557547 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.557611 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.557627 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.557679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.557703 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.660490 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.660528 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.660538 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.660554 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.660562 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.763617 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.763842 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.763980 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.764148 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.764302 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.867777 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.867834 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.867850 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.867875 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.867891 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.970443 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.970529 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.970547 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.970571 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:48 crc kubenswrapper[4863]: I1205 06:46:48.970591 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:48Z","lastTransitionTime":"2025-12-05T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.068383 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/0.log" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.076413 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.076639 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.076683 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.076704 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.076731 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.076757 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.077098 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.103833 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.124032 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.140991 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.159294 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.170585 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.179221 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.179259 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.179289 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.179306 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.179317 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.182437 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.200343 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:47Z\\\",\\\"message\\\":\\\" 6188 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:46:46.474874 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 06:46:46.474919 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 06:46:46.474977 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 06:46:46.475011 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 06:46:46.474979 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 06:46:46.475110 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 06:46:46.475117 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 06:46:46.475069 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:46:46.475164 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:46:46.475169 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 06:46:46.475214 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:46:46.475244 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:46:46.475230 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 06:46:46.475295 6188 factory.go:656] Stopping watch factory\\\\nI1205 06:46:46.475331 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
06:46:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.203766 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.216771 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.231080 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.250088 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.254954 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4"] Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.255817 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.258964 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.260154 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.274909 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.282032 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.282093 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.282595 4863 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.287683 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.287773 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.297243 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.302010 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.302087 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.302152 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.302224 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hqn9\" (UniqueName: \"kubernetes.io/projected/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-kube-api-access-4hqn9\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.317517 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.334339 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.399382 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.399434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.399445 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.399458 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.399479 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.399694 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:47Z\\\",\\\"message\\\":\\\" 6188 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:46:46.474874 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 06:46:46.474919 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 06:46:46.474977 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 06:46:46.475011 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 06:46:46.474979 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 06:46:46.475110 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 06:46:46.475117 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 06:46:46.475069 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:46:46.475164 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:46:46.475169 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 06:46:46.475214 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:46:46.475244 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:46:46.475230 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 06:46:46.475295 6188 factory.go:656] Stopping watch factory\\\\nI1205 06:46:46.475331 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
06:46:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.402818 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.402912 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.402969 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hqn9\" (UniqueName: \"kubernetes.io/projected/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-kube-api-access-4hqn9\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.403023 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.403363 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.403587 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.411976 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.412597 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.425288 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4hqn9\" (UniqueName: \"kubernetes.io/projected/0204ad42-ba52-4971-9c5a-ed9f949cb7e4-kube-api-access-4hqn9\") pod \"ovnkube-control-plane-749d76644c-2xxk4\" (UID: \"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.426394 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.444502 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.457235 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.470560 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.483961 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.500290 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.501858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.501900 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.501912 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.501938 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.501950 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.514441 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.525679 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.542448 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.563603 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.576162 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.577241 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.596522 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.601562 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.601631 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:49 crc kubenswrapper[4863]: E1205 06:46:49.601758 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:49 crc kubenswrapper[4863]: E1205 06:46:49.601906 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.601562 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:49 crc kubenswrapper[4863]: E1205 06:46:49.602331 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.605364 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.605404 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.605413 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.605427 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.605438 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.613539 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.707672 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.707730 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.707748 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.707773 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.707792 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.811360 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.811400 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.811412 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.811429 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.811442 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.913762 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.914066 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.914165 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.914272 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:49 crc kubenswrapper[4863]: I1205 06:46:49.914370 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:49Z","lastTransitionTime":"2025-12-05T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.017523 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.017578 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.017595 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.017617 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.017633 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.081848 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" event={"ID":"0204ad42-ba52-4971-9c5a-ed9f949cb7e4","Type":"ContainerStarted","Data":"49e5b9631b6429e549f1fdf140f46a8cfd46a22a98dfea761370f6bcfdc8bb43"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.120874 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.120953 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.120978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.121009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.121034 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.224509 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.224572 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.224591 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.224615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.224634 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.328062 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.328123 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.328142 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.328166 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.328190 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.431087 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.431152 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.431169 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.431194 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.431211 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.534635 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.534708 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.534729 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.534765 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.534786 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.637526 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.637581 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.637596 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.637625 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.637638 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.740602 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.740655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.740679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.740708 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.740732 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.844505 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.844569 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.844586 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.844610 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.844628 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.948287 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.948356 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.948373 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.948400 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:50 crc kubenswrapper[4863]: I1205 06:46:50.948419 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:50Z","lastTransitionTime":"2025-12-05T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.051423 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.051539 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.051559 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.051583 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.051603 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.088880 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/1.log" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.089843 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/0.log" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.094370 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280" exitCode=1 Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.094461 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.094601 4863 scope.go:117] "RemoveContainer" containerID="2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.095963 4863 scope.go:117] "RemoveContainer" containerID="7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280" Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.096514 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.097629 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" event={"ID":"0204ad42-ba52-4971-9c5a-ed9f949cb7e4","Type":"ContainerStarted","Data":"cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.097688 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" event={"ID":"0204ad42-ba52-4971-9c5a-ed9f949cb7e4","Type":"ContainerStarted","Data":"bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.125153 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.155193 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.155239 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.155254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.155277 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.155293 4863 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.157276 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-96nzc"] Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.157839 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.157918 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.163520 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc 
kubenswrapper[4863]: I1205 06:46:51.203032 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.216908 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\"
,\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"stat
e\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.220964 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjgks\" (UniqueName: \"kubernetes.io/projected/3943e053-ef4c-4348-98a8-cc1473a197f2-kube-api-access-kjgks\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.221049 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.228534 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.242296 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.252040 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.256796 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.256823 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.256833 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.256847 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.256858 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.272616 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:47Z\\\",\\\"message\\\":\\\" 6188 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:46:46.474874 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 06:46:46.474919 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 06:46:46.474977 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 06:46:46.475011 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 06:46:46.474979 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 06:46:46.475110 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 06:46:46.475117 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 06:46:46.475069 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:46:46.475164 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:46:46.475169 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 06:46:46.475214 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:46:46.475244 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:46:46.475230 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 06:46:46.475295 6188 factory.go:656] Stopping watch factory\\\\nI1205 06:46:46.475331 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
06:46:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPa
th\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.290048 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.302691 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.315174 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.321952 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.322021 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjgks\" (UniqueName: \"kubernetes.io/projected/3943e053-ef4c-4348-98a8-cc1473a197f2-kube-api-access-kjgks\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.322281 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.322400 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:51.822368604 +0000 UTC m=+39.548365694 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.327086 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e
911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.340826 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjgks\" (UniqueName: \"kubernetes.io/projected/3943e053-ef4c-4348-98a8-cc1473a197f2-kube-api-access-kjgks\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.345717 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.359274 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.359341 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.359362 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.359380 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.359391 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.365229 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.375193 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.386180 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.400237 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.418239 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.434633 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.459702 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"202
5-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\
\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.462256 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.462308 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.462321 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.462337 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.462348 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.478042 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 
06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.493671 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.514272 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.529978 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.562411 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:47Z\\\",\\\"message\\\":\\\" 6188 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:46:46.474874 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 06:46:46.474919 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 06:46:46.474977 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 06:46:46.475011 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 06:46:46.474979 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 06:46:46.475110 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 06:46:46.475117 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 06:46:46.475069 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:46:46.475164 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:46:46.475169 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 06:46:46.475214 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:46:46.475244 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:46:46.475230 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 06:46:46.475295 6188 factory.go:656] Stopping watch factory\\\\nI1205 06:46:46.475331 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:46:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\
"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.565110 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.565182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.565208 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.565239 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.565261 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.578368 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.597326 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.600845 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.600900 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.600900 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.601026 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.601213 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.601386 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.619397 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.637012 4863 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 
06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.654300 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.667861 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.667911 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.667929 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.667953 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.667970 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.673675 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:51Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.771081 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.771146 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.771169 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.771200 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.771222 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.827515 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.827718 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:51 crc kubenswrapper[4863]: E1205 06:46:51.827810 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:52.827784353 +0000 UTC m=+40.553781423 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.875192 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.875257 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.875273 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.875314 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.875331 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.978307 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.978374 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.978391 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.978415 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:51 crc kubenswrapper[4863]: I1205 06:46:51.978435 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:51Z","lastTransitionTime":"2025-12-05T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.081718 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.081782 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.081811 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.081843 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.081863 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.104062 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/1.log" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.184917 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.185009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.185028 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.185092 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.185114 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.288586 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.289071 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.289093 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.289122 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.289146 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.391978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.392047 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.392065 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.392091 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.392116 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.495911 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.496001 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.496035 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.496070 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.496095 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.599526 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.599632 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.599654 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.599679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.599696 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.601372 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:52 crc kubenswrapper[4863]: E1205 06:46:52.601695 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.624605 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a
95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:47Z\\\",\\\"message\\\":\\\" 6188 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:46:46.474874 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 06:46:46.474919 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 06:46:46.474977 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 06:46:46.475011 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 06:46:46.474979 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 06:46:46.475110 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 06:46:46.475117 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 06:46:46.475069 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:46:46.475164 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:46:46.475169 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 06:46:46.475214 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:46:46.475244 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:46:46.475230 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 06:46:46.475295 6188 factory.go:656] Stopping watch factory\\\\nI1205 06:46:46.475331 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:46:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-
o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.643533 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.659340 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.677857 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.691782 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.702318 4863 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.702555 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.702577 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.702603 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.702658 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.712232 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.729549 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.745293 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.761900 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.776904 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.796959 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.804625 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.804686 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.804703 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.804730 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.804749 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.825269 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.839029 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " 
pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:52 crc kubenswrapper[4863]: E1205 06:46:52.839298 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:52 crc kubenswrapper[4863]: E1205 06:46:52.839460 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:54.839421987 +0000 UTC m=+42.565419097 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.840252 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\
"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.858451 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.879037 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.899328 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.907766 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.907889 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.907908 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.907935 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:52 crc kubenswrapper[4863]: I1205 06:46:52.907956 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:52Z","lastTransitionTime":"2025-12-05T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.011524 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.011568 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.011584 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.011609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.011626 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.114052 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.114100 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.114117 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.114141 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.114158 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.217727 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.217830 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.217848 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.217873 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.217889 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.320462 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.320551 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.320574 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.320604 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.320652 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.423326 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.423381 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.423398 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.423421 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.423437 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.525956 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.526031 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.526048 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.526075 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.526092 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.600887 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.600929 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.600915 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:53 crc kubenswrapper[4863]: E1205 06:46:53.601083 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:53 crc kubenswrapper[4863]: E1205 06:46:53.601217 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:53 crc kubenswrapper[4863]: E1205 06:46:53.601320 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.629403 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.629467 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.629527 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.629559 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.629591 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.732799 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.732853 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.732871 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.732894 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.732911 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.836241 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.836322 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.836340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.836366 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.836383 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.939828 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.939895 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.939919 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.939947 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:53 crc kubenswrapper[4863]: I1205 06:46:53.939968 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:53Z","lastTransitionTime":"2025-12-05T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.043219 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.043304 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.043327 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.043357 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.043382 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.145998 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.146072 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.146095 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.146123 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.146142 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.249798 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.249859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.249875 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.249899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.249918 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.353464 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.353567 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.353591 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.353619 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.353645 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.430927 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.430974 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.430989 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.431013 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.431030 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.450947 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.456005 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.456063 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.456084 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.456107 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.456125 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.475562 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.480280 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.480339 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.480356 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.480377 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.480397 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.501007 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.505340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.505391 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.505407 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.505427 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.505445 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.525163 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.529969 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.530021 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.530040 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.530061 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.530078 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.549554 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.549774 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.551716 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.551814 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.551834 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.551857 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.551873 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.601205 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.601372 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.669133 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.669197 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.669216 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.669238 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.669255 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.772709 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.772773 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.772794 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.772817 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.772836 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.858706 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.858924 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:54 crc kubenswrapper[4863]: E1205 06:46:54.859030 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:46:58.859003757 +0000 UTC m=+46.585000827 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.875324 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.875372 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.875389 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.875409 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.875426 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.978715 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.978764 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.978781 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.978804 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:54 crc kubenswrapper[4863]: I1205 06:46:54.978822 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:54Z","lastTransitionTime":"2025-12-05T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.082534 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.082584 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.082601 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.082621 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.082636 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.185776 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.185870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.185891 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.185916 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.185975 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.288653 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.288733 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.288747 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.288764 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.288777 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.392305 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.392364 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.392381 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.392404 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.392423 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.495698 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.495751 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.495768 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.495789 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.495806 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.599569 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.599635 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.599655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.599679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.599701 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.600843 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.600891 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:55 crc kubenswrapper[4863]: E1205 06:46:55.601008 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.600858 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:55 crc kubenswrapper[4863]: E1205 06:46:55.601165 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:55 crc kubenswrapper[4863]: E1205 06:46:55.601304 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.703366 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.703436 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.703463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.703543 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.703573 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.807860 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.807905 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.807923 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.807944 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.807961 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.911330 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.911390 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.911409 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.911434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:55 crc kubenswrapper[4863]: I1205 06:46:55.911452 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:55Z","lastTransitionTime":"2025-12-05T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.014290 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.014355 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.014373 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.014398 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.014417 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.117562 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.117616 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.117633 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.117658 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.117676 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.220694 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.221161 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.221395 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.221648 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.221801 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.325202 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.325574 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.325801 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.326026 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.326219 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.429123 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.429485 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.429778 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.430006 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.430247 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.533253 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.533312 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.533323 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.533340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.533763 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.601858 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:56 crc kubenswrapper[4863]: E1205 06:46:56.602410 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.635844 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.635897 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.635915 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.635936 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.635955 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.739732 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.739998 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.740158 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.740288 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.740404 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.843891 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.843987 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.844007 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.844063 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.844082 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.946622 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.946666 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.946693 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.946708 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:56 crc kubenswrapper[4863]: I1205 06:46:56.946720 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:56Z","lastTransitionTime":"2025-12-05T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.049873 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.049936 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.049952 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.049977 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.049995 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.152502 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.152554 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.152573 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.152597 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.152616 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.255540 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.255570 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.255583 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.255616 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.255626 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.358131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.358212 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.358235 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.358275 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.358298 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.461456 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.461912 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.462093 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.462302 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.462525 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.566846 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.566930 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.566952 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.566986 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.567020 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.601084 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.601271 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:57 crc kubenswrapper[4863]: E1205 06:46:57.601381 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:57 crc kubenswrapper[4863]: E1205 06:46:57.601267 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.601464 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:57 crc kubenswrapper[4863]: E1205 06:46:57.601610 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.669832 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.669907 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.669924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.669947 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.669964 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.773890 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.773963 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.773985 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.774010 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.774027 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.876716 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.876781 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.876802 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.876827 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.876844 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.979939 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.979993 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.980010 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.980036 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:57 crc kubenswrapper[4863]: I1205 06:46:57.980055 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:57Z","lastTransitionTime":"2025-12-05T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.083337 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.083409 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.083431 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.083460 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.083522 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.187266 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.187328 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.187345 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.187369 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.187386 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.290909 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.290956 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.290973 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.290997 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.291014 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.394606 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.394663 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.394679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.394702 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.394719 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.498321 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.498373 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.498388 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.498409 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.498426 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.600879 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:58 crc kubenswrapper[4863]: E1205 06:46:58.601101 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.602913 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.602973 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.602997 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.603090 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.603117 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.706442 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.706562 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.706582 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.706608 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.706626 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.809910 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.809969 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.809985 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.810008 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.810026 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.903447 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:46:58 crc kubenswrapper[4863]: E1205 06:46:58.903700 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:58 crc kubenswrapper[4863]: E1205 06:46:58.903832 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:06.90379901 +0000 UTC m=+54.629796080 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.913158 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.913209 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.913226 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.913254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:58 crc kubenswrapper[4863]: I1205 06:46:58.913271 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:58Z","lastTransitionTime":"2025-12-05T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.016622 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.016699 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.016722 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.016751 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.016772 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.120107 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.120178 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.120196 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.120221 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.120253 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.223251 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.223316 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.223333 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.223358 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.223376 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.326358 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.326421 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.326438 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.326500 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.326518 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.429895 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.429946 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.429963 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.429986 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.430012 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.533110 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.533176 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.533193 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.533219 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.533235 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.601870 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.602023 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:46:59 crc kubenswrapper[4863]: E1205 06:46:59.602224 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.602264 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:46:59 crc kubenswrapper[4863]: E1205 06:46:59.602415 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:46:59 crc kubenswrapper[4863]: E1205 06:46:59.602721 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.636854 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.636907 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.636924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.636947 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.636967 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.740310 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.740394 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.740412 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.740437 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.740454 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.843085 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.843215 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.843236 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.843265 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.843282 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.946755 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.946863 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.946883 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.946907 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:46:59 crc kubenswrapper[4863]: I1205 06:46:59.946925 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:46:59Z","lastTransitionTime":"2025-12-05T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.050135 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.050203 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.050222 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.050248 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.050266 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.153174 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.153230 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.153247 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.153269 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.153286 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.255987 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.256023 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.256034 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.256053 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.256065 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.358511 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.358786 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.358921 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.359051 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.359166 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.462400 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.462513 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.462540 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.462598 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.462616 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.566388 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.566440 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.566456 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.566509 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.566527 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.601985 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:00 crc kubenswrapper[4863]: E1205 06:47:00.602195 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.669285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.669359 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.669382 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.669417 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.669439 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.772694 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.772759 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.772779 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.772804 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.772823 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.875695 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.875742 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.875754 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.875770 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.875781 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.978800 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.978860 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.978876 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.978904 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:00 crc kubenswrapper[4863]: I1205 06:47:00.978919 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:00Z","lastTransitionTime":"2025-12-05T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.083020 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.083081 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.083102 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.083128 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.083146 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.187062 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.187144 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.187163 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.187188 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.187211 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.296048 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.296184 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.296203 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.296234 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.296261 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.399682 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.399762 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.399785 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.399813 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.399835 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.503763 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.503834 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.503858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.503885 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.503926 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.601845 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.601973 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.602000 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:01 crc kubenswrapper[4863]: E1205 06:47:01.602160 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:01 crc kubenswrapper[4863]: E1205 06:47:01.602306 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:01 crc kubenswrapper[4863]: E1205 06:47:01.602940 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.606919 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.606960 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.606974 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.606993 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.607006 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.710700 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.710761 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.710778 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.710802 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.710819 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.814647 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.814711 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.814735 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.814764 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.814791 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.917461 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.917573 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.917594 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.917619 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:01 crc kubenswrapper[4863]: I1205 06:47:01.917637 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:01Z","lastTransitionTime":"2025-12-05T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.020621 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.020676 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.020693 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.020716 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.020734 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.123450 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.123826 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.124009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.124154 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.124296 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.227576 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.227631 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.227650 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.227674 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.227696 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.331154 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.331211 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.331229 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.331253 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.331275 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.434063 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.434123 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.434139 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.434162 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.434216 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.540393 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.540498 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.540521 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.540551 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.540578 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.601142 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:02 crc kubenswrapper[4863]: E1205 06:47:02.601346 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.620405 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.643402 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.643662 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.643835 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.643972 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.644100 4863 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.653160 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e8c136b478842aedb5ec3412c685fffea334ec609d841c951d681c7216f1643\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:47Z\\\",\\\"message\\\":\\\" 6188 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:46:46.474874 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 06:46:46.474919 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 06:46:46.474977 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 06:46:46.475011 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 06:46:46.474979 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 06:46:46.475110 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 06:46:46.475117 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 06:46:46.475069 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:46:46.475164 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:46:46.475169 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 06:46:46.475214 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:46:46.475244 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:46:46.475230 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 06:46:46.475295 6188 factory.go:656] Stopping watch factory\\\\nI1205 06:46:46.475331 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
06:46:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPa
th\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.676789 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.697976 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.719663 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.735444 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.747096 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.747141 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.747230 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.747289 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.747306 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.757674 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.776534 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.794644 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.810924 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.831408 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.850089 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.850141 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.850160 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.850182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.850198 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.852894 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.881784 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.900703 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.917911 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.940461 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt
\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:02Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.953280 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.953339 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.953356 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.953381 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:02 crc kubenswrapper[4863]: I1205 06:47:02.953398 4863 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:02Z","lastTransitionTime":"2025-12-05T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.056977 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.057034 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.057051 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.057074 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.057091 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.160442 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.160549 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.160568 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.160596 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.160613 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.263247 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.263297 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.263314 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.263338 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.263355 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.366334 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.366396 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.366414 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.366439 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.366456 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.469337 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.469408 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.469426 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.469870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.469928 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.572787 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.572841 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.572858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.572883 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.572901 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.602046 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.602113 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.602307 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:03 crc kubenswrapper[4863]: E1205 06:47:03.602462 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:03 crc kubenswrapper[4863]: E1205 06:47:03.603075 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:03 crc kubenswrapper[4863]: E1205 06:47:03.603230 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.604594 4863 scope.go:117] "RemoveContainer" containerID="7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.625523 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.641286 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.664714 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.675267 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.675327 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.675344 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.675371 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.675388 4863 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.685987 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.708937 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.729515 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.741757 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.755563 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is 
after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.772671 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.777295 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.777340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.777357 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.777380 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.777397 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.785515 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.814835 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.835007 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.848294 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.866940 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.880355 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.880414 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.880438 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.880465 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.880533 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.885693 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.900463 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:03Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.982344 4863 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.982387 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.982399 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.982417 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:03 crc kubenswrapper[4863]: I1205 06:47:03.982429 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:03Z","lastTransitionTime":"2025-12-05T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.061984 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.080264 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is 
after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.082975 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.084614 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.084664 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.084680 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.084701 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.084718 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.097799 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"1
92.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.127503 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 
reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.147242 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.153367 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/1.log" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.156277 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.157349 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.160865 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.160996 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161046 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:47:36.161018477 +0000 UTC m=+83.887015547 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.161099 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161121 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161162 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.161188 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161204 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.161231 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161236 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161301 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161317 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161330 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161340 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161345 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:36.161332374 +0000 UTC m=+83.887329434 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161373 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:36.161359354 +0000 UTC m=+83.887356504 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161401 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:36.161383305 +0000 UTC m=+83.887380425 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.161429 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:36.161421586 +0000 UTC m=+83.887418636 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.172614 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.186952 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.187024 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.187042 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.187065 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.187081 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.188246 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.201619 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.215008 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.227318 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.243970 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.262265 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernete
s/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.279881 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.290105 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.290143 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.290155 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.290172 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.290184 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.296682 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.310172 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.323811 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.332875 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.344050 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.355650 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.367812 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.379444 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.391387 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.392676 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.392750 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.392774 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.392809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.392835 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.408887 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.423880 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.433025 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.445954 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernete
s/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.458077 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.468574 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.480246 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.489123 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.494816 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.494866 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.494881 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.494898 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.494909 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.500051 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.510580 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.519914 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.539534 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.597748 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.597782 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.597793 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.597807 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.597817 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.601287 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.601390 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.700391 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.700440 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.700449 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.700465 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.700493 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.717904 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.717937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.717951 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.717966 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.717978 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.728698 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 
2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.733767 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.733792 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.733800 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.733813 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.733821 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.752410 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 
2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.757233 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.757267 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.757275 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.757290 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.757299 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.780887 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 
2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.788131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.788208 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.788233 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.788306 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.788326 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.807528 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 
2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.812213 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.812322 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.812342 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.812366 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.812384 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.832218 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:04Z is after 
2025-08-24T17:21:41Z" Dec 05 06:47:04 crc kubenswrapper[4863]: E1205 06:47:04.832627 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.835142 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.835195 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.835216 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.835240 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.835257 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.938019 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.938090 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.938134 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.938168 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:04 crc kubenswrapper[4863]: I1205 06:47:04.938192 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:04Z","lastTransitionTime":"2025-12-05T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.042390 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.042450 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.042499 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.042533 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.042559 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.146568 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.146637 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.146655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.146683 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.146700 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.162902 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/2.log" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.163894 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/1.log" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.168747 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791" exitCode=1 Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.168802 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.168853 4863 scope.go:117] "RemoveContainer" containerID="7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.169985 4863 scope.go:117] "RemoveContainer" containerID="d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791" Dec 05 06:47:05 crc kubenswrapper[4863]: E1205 06:47:05.170308 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.192104 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.218659 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.237221 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.250722 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.250786 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.250804 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.250829 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.250846 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.254889 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.276882 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.297047 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.327456 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b
5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a13c30b24c00b614baf570afd420baeb851e00a95bf3d4eb372971ba1c17280\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:46:50Z\\\",\\\"message\\\":\\\"I1205 06:46:49.821891 6324 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:46:49.821884 6324 factory.go:656] Stopping watch factory\\\\nI1205 06:46:49.821816 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821886 6324 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:46:49.821975 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.821939 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822051 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822253 6324 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822642 6324 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:46:49.822749 6324 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 
06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5
8ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.345707 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.353560 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.353598 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.353614 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.353637 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.353654 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.362079 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveRead
Only\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.387594 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]
}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.408723 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.
168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.432167 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-c
rc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.452656 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.456890 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.456934 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.456953 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.456977 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.456995 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.474814 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.500089 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.522543 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.538414 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:05Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.560958 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.561028 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.561053 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.561085 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.561111 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.601657 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.601794 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:05 crc kubenswrapper[4863]: E1205 06:47:05.601913 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.601925 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:05 crc kubenswrapper[4863]: E1205 06:47:05.602059 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:05 crc kubenswrapper[4863]: E1205 06:47:05.602179 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.664104 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.664163 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.664187 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.664213 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.664235 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.767903 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.767987 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.768011 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.768039 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.768062 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.871164 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.871233 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.871254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.871285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.871307 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.975121 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.975171 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.975187 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.975211 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:05 crc kubenswrapper[4863]: I1205 06:47:05.975229 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:05Z","lastTransitionTime":"2025-12-05T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.077405 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.077434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.077442 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.077456 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.077467 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.175088 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/2.log" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.179691 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.179968 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.180226 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.180437 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.180790 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.180597 4863 scope.go:117] "RemoveContainer" containerID="d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791" Dec 05 06:47:06 crc kubenswrapper[4863]: E1205 06:47:06.181716 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.194364 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.207670 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.221865 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.247617 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.272463 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.283249 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.283303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.283322 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.283350 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.283369 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.289090 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.302372 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.312806 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.327574 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernete
s/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.345659 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.358407 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.374641 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.385273 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.385665 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.385713 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.385730 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.385748 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.385761 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.395463 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.407439 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.422227 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.452653 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:06Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.487383 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.487414 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.487421 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.487436 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.487444 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.590571 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.590636 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.590658 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.590687 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.590708 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.601328 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:06 crc kubenswrapper[4863]: E1205 06:47:06.601553 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.694135 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.694227 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.694247 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.694273 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.694291 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.797884 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.797952 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.797969 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.797991 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.798008 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.901599 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.901661 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.901680 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.901702 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.901719 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:06Z","lastTransitionTime":"2025-12-05T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:06 crc kubenswrapper[4863]: I1205 06:47:06.991790 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:06 crc kubenswrapper[4863]: E1205 06:47:06.991956 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:47:06 crc kubenswrapper[4863]: E1205 06:47:06.992036 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:22.992014154 +0000 UTC m=+70.718011224 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.004795 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.005043 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.005251 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.005414 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.005585 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.108798 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.108836 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.108844 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.108860 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.108869 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.212298 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.212344 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.212357 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.212374 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.212387 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.315806 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.315866 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.315883 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.315908 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.315927 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.418789 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.418851 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.418870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.418897 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.418917 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.522229 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.522318 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.522345 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.522377 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.522406 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.601755 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.601963 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.602158 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:07 crc kubenswrapper[4863]: E1205 06:47:07.602201 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:07 crc kubenswrapper[4863]: E1205 06:47:07.601985 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:07 crc kubenswrapper[4863]: E1205 06:47:07.602408 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.625640 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.625721 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.625744 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.625772 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.625793 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.729101 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.729181 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.729200 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.729225 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.729248 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.832274 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.832578 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.832591 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.832604 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.832615 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.935353 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.935422 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.935440 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.935465 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:07 crc kubenswrapper[4863]: I1205 06:47:07.935530 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:07Z","lastTransitionTime":"2025-12-05T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.038342 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.038391 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.038403 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.038425 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.038437 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.141578 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.141644 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.141667 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.141692 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.141710 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.244526 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.244594 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.244623 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.244653 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.244678 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.348178 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.348244 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.348262 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.348289 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.348306 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.450558 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.450615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.450654 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.450684 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.450726 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.553721 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.553785 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.553808 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.553838 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.553859 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.601304 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:08 crc kubenswrapper[4863]: E1205 06:47:08.601529 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.656078 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.656130 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.656148 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.656169 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.656187 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.759005 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.759077 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.759100 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.759128 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.759154 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.862120 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.862189 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.862208 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.862271 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.862291 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.964929 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.964987 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.965008 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.965036 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:08 crc kubenswrapper[4863]: I1205 06:47:08.965054 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:08Z","lastTransitionTime":"2025-12-05T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.068829 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.068884 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.068902 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.068938 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.068955 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.172422 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.172908 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.173055 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.173185 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.173310 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.276300 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.276353 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.276369 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.276392 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.276411 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.379521 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.379557 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.379568 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.379584 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.379593 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.483126 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.483183 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.483204 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.483231 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.483249 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.586111 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.586174 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.586191 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.586216 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.586234 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.601684 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.601743 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.601802 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:09 crc kubenswrapper[4863]: E1205 06:47:09.601984 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:09 crc kubenswrapper[4863]: E1205 06:47:09.602097 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:09 crc kubenswrapper[4863]: E1205 06:47:09.602236 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.688940 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.689024 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.689040 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.689065 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.689085 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.792615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.792665 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.792683 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.792706 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.792724 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.899809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.899862 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.899879 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.899902 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:09 crc kubenswrapper[4863]: I1205 06:47:09.899923 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:09Z","lastTransitionTime":"2025-12-05T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.002786 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.002843 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.002859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.002882 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.002899 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.106004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.106080 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.106105 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.106135 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.106155 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.209117 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.209165 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.209177 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.209195 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.209210 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.312694 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.312729 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.312767 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.312803 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.312817 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.415879 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.415925 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.415936 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.415953 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.415965 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.518648 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.518710 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.518727 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.518752 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.518769 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.601822 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:10 crc kubenswrapper[4863]: E1205 06:47:10.602027 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.621552 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.621609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.621626 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.621649 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.621667 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.725101 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.725158 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.725174 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.725197 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.725215 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.828336 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.828402 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.828424 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.828455 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.828506 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.931731 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.931970 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.932020 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.932059 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:10 crc kubenswrapper[4863]: I1205 06:47:10.932081 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:10Z","lastTransitionTime":"2025-12-05T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.034947 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.035004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.035025 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.035050 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.035068 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.138859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.138915 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.138933 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.138956 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.138971 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.241335 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.241379 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.241390 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.241407 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.241420 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.343728 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.343783 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.343795 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.343818 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.343831 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.446704 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.446745 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.446756 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.446769 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.446778 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.550069 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.550137 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.550155 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.550182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.550201 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.601973 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.602029 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.602026 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:11 crc kubenswrapper[4863]: E1205 06:47:11.602351 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:11 crc kubenswrapper[4863]: E1205 06:47:11.602672 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:11 crc kubenswrapper[4863]: E1205 06:47:11.602822 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.654036 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.654098 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.654115 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.654140 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.654158 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.763725 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.763821 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.763842 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.763870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.763898 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.867427 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.867535 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.867555 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.867579 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.867598 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.970868 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.971581 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.971614 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.971640 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:11 crc kubenswrapper[4863]: I1205 06:47:11.971756 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:11Z","lastTransitionTime":"2025-12-05T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.074862 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.074927 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.074945 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.074969 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.074989 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.178419 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.178499 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.178512 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.178530 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.178541 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.281338 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.281401 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.281420 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.281445 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.281463 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.385110 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.385420 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.385627 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.385769 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.385903 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.489323 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.489391 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.489414 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.489446 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.489503 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.592947 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.593010 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.593028 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.593052 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.593069 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.601085 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:12 crc kubenswrapper[4863]: E1205 06:47:12.601235 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.621619 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.637771 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.670915 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.695657 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.695915 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.695951 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.695967 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.695987 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.696002 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.716358 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.737611 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.758354 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.776734 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.796281 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.799161 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.799204 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.799219 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.799241 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.799259 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.816819 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.832505 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.856207 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.877349 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.898007 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.902972 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.903067 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.903086 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.903113 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.903131 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:12Z","lastTransitionTime":"2025-12-05T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.920672 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:
46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.937181 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:12 crc kubenswrapper[4863]: I1205 06:47:12.952942 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:12Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.005387 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.005436 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.005448 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.005464 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.005501 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.108442 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.108551 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.108573 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.108603 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.108623 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.211788 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.211845 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.211861 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.211884 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.211901 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.315332 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.315378 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.315394 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.315416 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.315433 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.418149 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.418213 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.418231 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.418256 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.418275 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.521322 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.521394 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.521416 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.521445 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.521501 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.601462 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.601560 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.601562 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:13 crc kubenswrapper[4863]: E1205 06:47:13.601653 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:13 crc kubenswrapper[4863]: E1205 06:47:13.601805 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:13 crc kubenswrapper[4863]: E1205 06:47:13.602039 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.623921 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.623975 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.623990 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.624012 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.624028 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.727445 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.727529 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.727542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.727563 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.727575 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.830073 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.830121 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.830132 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.830149 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.830160 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.932908 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.932962 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.932978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.933004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:13 crc kubenswrapper[4863]: I1205 06:47:13.933020 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:13Z","lastTransitionTime":"2025-12-05T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.036271 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.036339 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.036361 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.036392 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.036415 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.138710 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.138772 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.138789 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.138812 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.138830 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.242087 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.242155 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.242173 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.242198 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.242222 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.344928 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.344971 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.344987 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.345010 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.345027 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.448187 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.448225 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.448238 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.448254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.448264 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.551574 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.551611 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.551622 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.551638 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.551650 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.603362 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:14 crc kubenswrapper[4863]: E1205 06:47:14.603508 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.654039 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.654067 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.654076 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.654089 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.654098 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.756776 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.756819 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.756835 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.756857 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.756874 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.859875 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.859948 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.859970 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.859997 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.860018 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.866330 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.866377 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.866399 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.866425 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.866447 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: E1205 06:47:14.887107 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:14Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.891199 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.891232 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.891243 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.891259 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.891271 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: E1205 06:47:14.904558 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:14Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.908686 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.908729 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.908747 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.908773 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.908791 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: E1205 06:47:14.925202 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:14Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.929655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.929734 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.929754 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.929814 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.929834 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: E1205 06:47:14.951396 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:14Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.956803 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.956881 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.956899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.956923 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.956977 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:14 crc kubenswrapper[4863]: E1205 06:47:14.978515 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:14Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:14 crc kubenswrapper[4863]: E1205 06:47:14.978904 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.981795 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.981830 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.981840 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.981856 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:14 crc kubenswrapper[4863]: I1205 06:47:14.981867 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:14Z","lastTransitionTime":"2025-12-05T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.084687 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.084731 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.084739 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.084753 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.084762 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.188438 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.188529 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.188547 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.188572 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.188592 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.291638 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.291676 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.291688 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.291703 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.291715 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.394982 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.395053 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.395073 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.395096 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.395110 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.497955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.497998 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.498015 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.498037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.498054 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.600580 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.600615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.600626 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.600641 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.600652 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.600838 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.600968 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:15 crc kubenswrapper[4863]: E1205 06:47:15.601100 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.601147 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:15 crc kubenswrapper[4863]: E1205 06:47:15.601330 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:15 crc kubenswrapper[4863]: E1205 06:47:15.601871 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.703530 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.703563 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.703576 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.703588 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.703599 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.806341 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.806437 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.806461 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.806525 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.806552 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.909137 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.909206 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.909229 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.909261 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:15 crc kubenswrapper[4863]: I1205 06:47:15.909286 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:15Z","lastTransitionTime":"2025-12-05T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.012615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.012660 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.012671 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.012687 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.012699 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.115160 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.115219 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.115236 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.115260 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.115278 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.217182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.217220 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.217233 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.217253 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.217267 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.320156 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.320192 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.320201 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.320216 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.320228 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.422028 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.422435 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.422613 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.422768 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.422918 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.526138 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.526207 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.526224 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.526251 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.526267 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.601739 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:16 crc kubenswrapper[4863]: E1205 06:47:16.601887 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.628961 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.629364 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.629542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.629661 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.629779 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.732865 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.732907 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.732919 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.732937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.732950 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.835728 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.835771 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.835781 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.835798 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.835809 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.938609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.938651 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.938664 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.938680 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:16 crc kubenswrapper[4863]: I1205 06:47:16.938692 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:16Z","lastTransitionTime":"2025-12-05T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.040797 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.041171 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.041259 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.041335 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.041434 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.143974 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.144029 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.144041 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.144059 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.144073 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.246381 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.246449 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.246506 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.246534 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.246553 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.349206 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.349245 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.349254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.349270 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.349281 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.451920 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.452009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.452024 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.452044 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.452055 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.554720 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.554759 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.554773 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.554788 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.554797 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.601266 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:17 crc kubenswrapper[4863]: E1205 06:47:17.601390 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.601267 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.601535 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:17 crc kubenswrapper[4863]: E1205 06:47:17.601673 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:17 crc kubenswrapper[4863]: E1205 06:47:17.601860 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.657265 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.657291 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.657304 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.657317 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.657329 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.759809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.759859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.759871 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.759889 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.759902 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.862764 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.862790 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.862798 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.862809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.862818 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.965042 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.965080 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.965096 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.965116 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:17 crc kubenswrapper[4863]: I1205 06:47:17.965132 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:17Z","lastTransitionTime":"2025-12-05T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.070720 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.070788 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.070805 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.070829 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.070858 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.173321 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.174009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.174083 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.174144 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.174210 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.276074 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.276103 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.276112 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.276125 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.276134 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.379114 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.379171 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.379189 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.379211 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.379231 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.521611 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.523781 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.524901 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.525319 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.525738 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.601718 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:18 crc kubenswrapper[4863]: E1205 06:47:18.601854 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.628755 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.628805 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.628818 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.628838 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.628850 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.731184 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.731219 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.731229 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.731246 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.731258 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.834570 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.834877 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.835010 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.835170 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.835304 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.937837 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.937875 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.937888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.937904 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:18 crc kubenswrapper[4863]: I1205 06:47:18.937916 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:18Z","lastTransitionTime":"2025-12-05T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.040492 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.040831 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.041034 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.041225 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.041359 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.143771 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.144436 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.144620 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.144769 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.144913 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.246926 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.246980 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.246989 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.247007 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.247017 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.349242 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.349279 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.349287 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.349303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.349313 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.451508 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.451809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.451965 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.452111 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.452245 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.555301 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.555568 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.555693 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.555789 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.555870 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.601166 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:19 crc kubenswrapper[4863]: E1205 06:47:19.601290 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.601877 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.601886 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:19 crc kubenswrapper[4863]: E1205 06:47:19.602178 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:19 crc kubenswrapper[4863]: E1205 06:47:19.602184 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.658401 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.658438 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.658446 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.658463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.658485 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.761035 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.761104 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.761127 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.761157 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.761179 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.871977 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.872010 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.872018 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.872032 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.872042 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.974765 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.974825 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.974841 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.974877 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:19 crc kubenswrapper[4863]: I1205 06:47:19.974897 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:19Z","lastTransitionTime":"2025-12-05T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.078530 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.078608 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.078631 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.078657 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.078691 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.180924 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.180970 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.180988 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.181011 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.181030 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.283148 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.283652 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.283744 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.283807 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.283864 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.386235 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.386652 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.386789 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.386928 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.387055 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.489699 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.489735 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.489748 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.489763 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.489774 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.592369 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.592404 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.592413 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.592428 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.592438 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.601597 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:20 crc kubenswrapper[4863]: E1205 06:47:20.601791 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.695505 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.695575 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.695594 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.695616 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.695632 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.798183 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.798654 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.799057 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.799275 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.799570 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.902542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.902575 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.902587 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.902603 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:20 crc kubenswrapper[4863]: I1205 06:47:20.902615 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:20Z","lastTransitionTime":"2025-12-05T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.004870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.005284 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.005449 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.005656 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.005818 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.108721 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.109002 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.109091 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.109176 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.109268 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.212504 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.212550 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.212562 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.212581 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.212593 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.315779 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.315813 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.315822 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.315836 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.315845 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.417838 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.417865 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.417876 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.417888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.417896 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.520190 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.520435 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.520530 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.520641 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.520707 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.600989 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.601027 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:21 crc kubenswrapper[4863]: E1205 06:47:21.601126 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:21 crc kubenswrapper[4863]: E1205 06:47:21.601276 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.601375 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:21 crc kubenswrapper[4863]: E1205 06:47:21.601710 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.602651 4863 scope.go:117] "RemoveContainer" containerID="d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791" Dec 05 06:47:21 crc kubenswrapper[4863]: E1205 06:47:21.602995 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.622257 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.622444 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.622577 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.622603 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.622612 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.725042 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.725327 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.725413 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.725520 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.725602 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.827816 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.827858 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.827868 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.827886 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.827897 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.929649 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.929718 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.929736 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.929761 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:21 crc kubenswrapper[4863]: I1205 06:47:21.929779 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:21Z","lastTransitionTime":"2025-12-05T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.033157 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.033215 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.033232 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.033254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.033271 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.136139 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.136168 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.136176 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.136189 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.136198 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.238719 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.238774 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.238790 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.238814 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.238831 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.340884 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.341361 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.341431 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.341530 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.341597 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.444207 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.444254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.444269 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.444288 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.444302 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.546575 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.546674 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.546696 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.546721 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.546737 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.601354 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:22 crc kubenswrapper[4863]: E1205 06:47:22.603159 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.631435 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.642615 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.649270 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.649310 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.649328 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.649353 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.649369 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.655560 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.671863 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.689589 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:
37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.702209 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf
5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.719347 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.736176 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.753763 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.753842 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.753865 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.753894 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.753916 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.758014 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.777954 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.791425 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.812705 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.831833 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\
\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abd
f5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\
\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.848334 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.857278 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.857338 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.857360 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.857387 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.857408 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.862160 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.876835 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.895069 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:22Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.959965 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.960004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.960056 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.960074 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:22 crc kubenswrapper[4863]: I1205 06:47:22.960084 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:22Z","lastTransitionTime":"2025-12-05T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.003524 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:23 crc kubenswrapper[4863]: E1205 06:47:23.003656 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:47:23 crc kubenswrapper[4863]: E1205 06:47:23.003705 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:47:55.003692518 +0000 UTC m=+102.729689558 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.062519 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.062577 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.062608 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.062624 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.062634 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.165379 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.165441 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.165458 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.165507 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.165528 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.231682 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/0.log" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.231875 4863 generic.go:334] "Generic (PLEG): container finished" podID="b9e2cdef-4a53-4f32-b973-e5d6ba0708db" containerID="29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4" exitCode=1 Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.231933 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerDied","Data":"29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.232631 4863 scope.go:117] "RemoveContainer" containerID="29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.254831 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.267977 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.268018 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.268036 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.268057 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.268074 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.272931 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.289292 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.301506 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.311698 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.323332 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.340637 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.349546 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.361525 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.371723 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.371764 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.371777 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.371794 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.371805 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.376990 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.388141 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.400517 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.411718 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.425771 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bb
f5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.436411 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.445388 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.467522 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:23Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.473826 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.473870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.473882 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.473902 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.473914 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.577000 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.577040 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.577050 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.577066 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.577076 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.601326 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.601351 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:23 crc kubenswrapper[4863]: E1205 06:47:23.601502 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.601659 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:23 crc kubenswrapper[4863]: E1205 06:47:23.601716 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:23 crc kubenswrapper[4863]: E1205 06:47:23.601866 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.679695 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.679754 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.679785 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.679809 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.679829 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.782900 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.782966 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.782985 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.783014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.783067 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.886367 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.886423 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.886442 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.886509 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.886538 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.990213 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.990265 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.990281 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.990304 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:23 crc kubenswrapper[4863]: I1205 06:47:23.990320 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:23Z","lastTransitionTime":"2025-12-05T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.093367 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.093507 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.093527 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.093556 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.093574 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.195961 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.196019 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.196036 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.196059 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.196077 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.238307 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/0.log" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.238371 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerStarted","Data":"8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.256635 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.275419 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.289759 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.299538 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.299598 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.299617 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.299644 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.299662 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.303510 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f7
78c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.320395 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.338421 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.351624 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.363454 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.378309 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.394324 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.402767 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.402812 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.402822 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.402838 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.402849 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.408831 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.431996 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.447549 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.463166 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.478465 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.490419 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.502596 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:24Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.505103 4863 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.505128 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.505154 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.505169 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.505178 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.601293 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:24 crc kubenswrapper[4863]: E1205 06:47:24.601493 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.607771 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.607830 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.607852 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.607880 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.607901 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.710758 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.710810 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.710831 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.710857 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.710878 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.813911 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.813992 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.814018 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.814050 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.814106 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.916950 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.917012 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.917029 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.917052 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:24 crc kubenswrapper[4863]: I1205 06:47:24.917069 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:24Z","lastTransitionTime":"2025-12-05T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.021037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.021107 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.021131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.021159 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.021181 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.124818 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.124888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.124904 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.124928 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.124946 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.172007 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.172056 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.172109 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.172131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.172147 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.190933 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:25Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.195566 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.195631 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.195643 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.195661 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.195672 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.214165 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:25Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.217990 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.218032 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.218050 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.218073 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.218091 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.232081 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:25Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.236491 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.236515 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.236523 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.236535 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.236542 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.253193 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:25Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.258311 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.258380 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.258400 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.258420 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.258436 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.271099 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:25Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.271326 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.272962 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.273032 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.273046 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.273064 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.273079 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.375957 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.376016 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.376033 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.376057 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.376076 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.479720 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.479891 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.479911 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.479935 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.479952 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.583295 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.583378 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.583401 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.583434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.583460 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.601680 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.601731 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.601680 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.601886 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.601961 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:25 crc kubenswrapper[4863]: E1205 06:47:25.602057 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.687064 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.687688 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.687720 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.687748 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.687768 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.791033 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.791131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.791149 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.791172 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.791191 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.894540 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.894608 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.894625 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.894648 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.894665 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.996608 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.996667 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.996684 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.996706 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:25 crc kubenswrapper[4863]: I1205 06:47:25.996724 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:25Z","lastTransitionTime":"2025-12-05T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.099403 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.099457 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.099514 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.099544 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.099561 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.203251 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.203314 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.203331 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.203354 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.203371 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.306533 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.306592 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.306609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.306633 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.306651 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.409383 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.409452 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.409508 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.409542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.409563 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.512937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.513012 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.513032 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.513062 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.513085 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.601826 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:26 crc kubenswrapper[4863]: E1205 06:47:26.602057 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.615939 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.616000 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.616017 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.616040 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.616057 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.719292 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.719332 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.719343 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.719358 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.719370 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.824967 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.825020 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.825035 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.825056 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.825072 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.927621 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.927658 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.927670 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.927686 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:26 crc kubenswrapper[4863]: I1205 06:47:26.927697 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:26Z","lastTransitionTime":"2025-12-05T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.030061 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.030127 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.030143 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.030172 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.030192 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.133589 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.133649 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.133667 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.133692 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.133709 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.236563 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.236610 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.236626 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.236648 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.236665 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.339264 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.339330 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.339352 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.339379 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.339397 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.442244 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.442279 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.442290 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.442307 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.442318 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.545806 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.545877 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.545899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.545926 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.545946 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.601875 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.601913 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.601884 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:27 crc kubenswrapper[4863]: E1205 06:47:27.602076 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:27 crc kubenswrapper[4863]: E1205 06:47:27.602352 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:27 crc kubenswrapper[4863]: E1205 06:47:27.602412 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.648965 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.649032 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.649100 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.649132 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.649182 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.751776 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.751837 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.751856 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.751880 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.751898 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.859018 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.859093 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.859115 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.859145 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.859166 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.962433 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.962542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.962567 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.962598 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:27 crc kubenswrapper[4863]: I1205 06:47:27.962622 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:27Z","lastTransitionTime":"2025-12-05T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.065235 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.065294 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.065313 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.065339 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.065358 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.167884 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.167939 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.167959 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.167983 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.168001 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.271431 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.271546 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.271572 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.271604 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.271627 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.374002 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.374060 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.374077 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.374100 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.374121 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.477148 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.477201 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.477218 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.477243 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.477262 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.579912 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.579981 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.579998 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.580025 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.580042 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.601353 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:28 crc kubenswrapper[4863]: E1205 06:47:28.601577 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.683633 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.683677 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.683688 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.683704 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.683715 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.788937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.789016 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.789040 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.789072 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.789097 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.892143 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.892204 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.892222 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.892243 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.892260 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.995365 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.995416 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.995434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.995457 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:28 crc kubenswrapper[4863]: I1205 06:47:28.995533 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:28Z","lastTransitionTime":"2025-12-05T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.098661 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.098705 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.098720 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.098743 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.098759 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.201894 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.202833 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.203062 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.203208 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.203336 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.306050 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.306151 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.306180 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.306210 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.306233 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.409800 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.409853 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.409870 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.409893 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.409910 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.512582 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.513409 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.513595 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.513752 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.513888 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.601139 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.601197 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:29 crc kubenswrapper[4863]: E1205 06:47:29.601333 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:29 crc kubenswrapper[4863]: E1205 06:47:29.601509 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.601888 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:29 crc kubenswrapper[4863]: E1205 06:47:29.602136 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.617377 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.617607 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.617769 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.617927 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.618068 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.721167 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.721528 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.721717 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.721916 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.722116 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.825346 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.825451 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.825516 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.825548 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.825568 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.927978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.928041 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.928058 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.928082 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:29 crc kubenswrapper[4863]: I1205 06:47:29.928100 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:29Z","lastTransitionTime":"2025-12-05T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.032140 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.032215 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.032236 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.032267 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.032287 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.135668 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.135746 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.135770 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.135800 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.135822 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.239262 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.239753 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.239905 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.240152 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.240331 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.344142 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.344215 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.344238 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.344267 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.344287 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.447118 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.447181 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.447198 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.447221 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.447239 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.550104 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.550945 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.551107 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.551252 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.551416 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.601918 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:30 crc kubenswrapper[4863]: E1205 06:47:30.602117 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.653998 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.654065 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.654087 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.654112 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.654130 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.757587 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.757655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.757678 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.757708 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.757729 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.861580 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.861643 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.861661 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.861687 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.861713 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.964859 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.964929 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.964956 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.964986 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:30 crc kubenswrapper[4863]: I1205 06:47:30.965010 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:30Z","lastTransitionTime":"2025-12-05T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.068600 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.068675 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.068701 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.068732 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.068753 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.171446 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.171537 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.171553 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.171577 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.171643 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.274119 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.274181 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.274199 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.274225 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.274243 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.377868 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.377931 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.377942 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.377961 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.377973 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.480377 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.480436 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.480452 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.480501 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.480518 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.583076 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.584074 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.584275 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.584537 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.584838 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.600882 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.600980 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.601314 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:31 crc kubenswrapper[4863]: E1205 06:47:31.601549 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:31 crc kubenswrapper[4863]: E1205 06:47:31.601921 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:31 crc kubenswrapper[4863]: E1205 06:47:31.602178 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.688285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.688331 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.688343 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.688361 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.688374 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.791750 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.791811 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.791828 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.791853 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.791871 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.895713 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.895798 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.895824 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.895854 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.895876 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.999357 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.999450 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.999516 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.999554 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:31 crc kubenswrapper[4863]: I1205 06:47:31.999580 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:31Z","lastTransitionTime":"2025-12-05T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.103665 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.103742 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.103762 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.103787 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.103805 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.207496 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.207550 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.207566 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.207589 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.207606 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.309874 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.309932 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.309954 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.309982 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.310002 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.445303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.445911 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.446063 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.446186 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.446321 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.549554 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.549755 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.549897 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.550014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.550121 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.601117 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:32 crc kubenswrapper[4863]: E1205 06:47:32.601359 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.619912 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.636208 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.653774 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.653826 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.653839 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.653857 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.653870 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.655661 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.672028 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.685850 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc 
kubenswrapper[4863]: I1205 06:47:32.705875 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.720087 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.745280 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.756439 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.756487 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.756497 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.756510 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.756519 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.765541 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.783550 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.801878 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.815816 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.833433 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f4032
89f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.851529 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.858796 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.859068 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.859279 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.859536 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.859743 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.867578 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.883149 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.900722 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:32Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.962425 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.962519 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.962543 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.962571 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:32 crc kubenswrapper[4863]: I1205 06:47:32.962595 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:32Z","lastTransitionTime":"2025-12-05T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.065566 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.065646 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.065670 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.065702 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.065725 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.168074 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.168129 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.168148 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.168172 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.168191 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.270428 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.270463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.270505 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.270521 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.270533 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.373293 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.373367 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.373386 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.373414 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.373435 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.476383 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.476445 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.476462 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.476518 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.476535 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.579122 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.579185 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.579204 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.579229 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.579246 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.601680 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:33 crc kubenswrapper[4863]: E1205 06:47:33.601865 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.601904 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.601921 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:33 crc kubenswrapper[4863]: E1205 06:47:33.602438 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:33 crc kubenswrapper[4863]: E1205 06:47:33.602683 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.603024 4863 scope.go:117] "RemoveContainer" containerID="d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.682049 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.682143 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.682195 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.682224 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.682241 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.786625 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.787041 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.787100 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.787126 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.787144 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.890561 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.890670 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.890698 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.890741 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.890774 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.993673 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.993736 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.993755 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.993787 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:33 crc kubenswrapper[4863]: I1205 06:47:33.993808 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:33Z","lastTransitionTime":"2025-12-05T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.097060 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.097154 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.097182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.097218 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.097244 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.200865 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.200934 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.200953 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.201166 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.201187 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.275172 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/2.log" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.277021 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.278056 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.296422 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af2759520
2715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.303310 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.303333 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.303341 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.303354 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.303362 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.307292 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.316237 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.330347 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.339658 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.348623 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.360203 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.368747 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 
06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.379972 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.388493 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.397104 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.406275 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.406333 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.406354 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.406382 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.406402 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.412590 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 
06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.424284 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.437641 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.449238 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.461112 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.472628 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:34Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.508385 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.508412 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.508420 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.508435 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.508444 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.603808 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:34 crc kubenswrapper[4863]: E1205 06:47:34.603897 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.610340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.610362 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.610370 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.610380 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.610389 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.712590 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.712613 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.712621 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.712640 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.712652 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.814457 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.814499 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.814508 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.814521 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.814530 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.917605 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.917655 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.917671 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.917691 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:34 crc kubenswrapper[4863]: I1205 06:47:34.917705 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:34Z","lastTransitionTime":"2025-12-05T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.020099 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.020146 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.020163 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.020185 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.020203 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.122567 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.122608 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.122627 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.122648 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.122664 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.224296 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.224324 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.224337 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.224354 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.224367 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.326592 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.326629 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.326644 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.326660 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.326673 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.429273 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.429327 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.429345 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.429370 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.429387 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.495221 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.495427 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.495572 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.495694 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.495802 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.512694 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:35Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.516841 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.516910 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.516930 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.516955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.516973 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.532389 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:35Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.537022 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.537080 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.537099 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.537125 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.537146 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.560334 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:35Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.564919 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.564967 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.564985 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.565009 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.565034 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.584671 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:35Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.588667 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.588712 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.588725 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.588745 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.588758 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.600975 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.601049 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.601105 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.601184 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.601293 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.601518 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.604955 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:35Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:35 crc kubenswrapper[4863]: E1205 06:47:35.605226 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.609816 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.609902 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.609925 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.609949 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.609967 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.615402 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.712815 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.712846 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.712861 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.712882 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.712897 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.815002 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.815070 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.815091 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.815117 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.815137 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.917518 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.917839 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.918029 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.918255 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:35 crc kubenswrapper[4863]: I1205 06:47:35.918418 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:35Z","lastTransitionTime":"2025-12-05T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.021409 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.021664 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.021899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.022177 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.022594 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.125609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.125955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.126119 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.126292 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.126411 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.182035 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.182134 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.182180 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.182220 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.182256 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182424 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182447 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182464 4863 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182560 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 06:48:40.182539443 +0000 UTC m=+147.908536523 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182638 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:40.182625385 +0000 UTC m=+147.908622455 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182713 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182731 4863 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182746 4863 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182782 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 06:48:40.182770408 +0000 UTC m=+147.908767488 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182832 4863 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182866 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:48:40.18285565 +0000 UTC m=+147.908852720 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182931 4863 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.182967 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 06:48:40.182956052 +0000 UTC m=+147.908953122 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.228131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.228179 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.228195 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.228216 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.228232 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.331085 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.331140 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.331159 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.331182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.331199 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.434071 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.434114 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.434131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.434153 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.434170 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.537295 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.537356 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.537373 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.537395 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.537410 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.601299 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:36 crc kubenswrapper[4863]: E1205 06:47:36.601535 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.639778 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.640505 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.640694 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.640840 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.640959 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.743299 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.743346 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.743362 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.743384 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.743401 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.845644 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.845677 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.845686 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.845699 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.845708 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.947920 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.947979 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.947999 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.948023 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:36 crc kubenswrapper[4863]: I1205 06:47:36.948039 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:36Z","lastTransitionTime":"2025-12-05T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.050550 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.050618 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.050637 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.050662 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.050682 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.153098 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.153140 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.153151 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.153169 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.153180 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.256192 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.256269 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.256286 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.256310 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.256329 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.288906 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/3.log" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.290120 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/2.log" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.294163 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" exitCode=1 Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.294225 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.294277 4863 scope.go:117] "RemoveContainer" containerID="d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.295597 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:47:37 crc kubenswrapper[4863]: E1205 06:47:37.295955 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.319512 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.340648 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.359884 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.359937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.359956 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.359978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.359996 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.360540 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.379824 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.398083 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.418630 4863 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.438454 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.454605 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.463788 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.463855 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.463872 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.463899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.463934 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.480189 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\
\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 
06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.504549 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.524203 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.545266 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.563385 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 
06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.568077 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.568561 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.568866 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.569107 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.569288 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.580897 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.598431 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0550e845-371c-48d4-a16d-dab40c42e36b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2aed5a46f944c7cf02ef5b14d140f1a620df805de2f3bd613aaed7882ac48b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.601638 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.601739 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:37 crc kubenswrapper[4863]: E1205 06:47:37.602774 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:37 crc kubenswrapper[4863]: E1205 06:47:37.602876 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.601755 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:37 crc kubenswrapper[4863]: E1205 06:47:37.603011 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.619692 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.636324 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.668731 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:36Z\\\",\\\"message\\\":\\\"b.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620159 6926 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620222 6926 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620257 6926 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 06:47:35.620423 6926 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620598 6926 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620836 6926 
reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620931 6926 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621070 6926 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621719 6926 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:37Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.672240 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.672291 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.672308 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.672330 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.672345 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.775230 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.775283 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.775296 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.775315 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.775332 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.878044 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.878102 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.878122 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.878147 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.878164 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.981091 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.981154 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.981174 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.981202 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:37 crc kubenswrapper[4863]: I1205 06:47:37.981219 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:37Z","lastTransitionTime":"2025-12-05T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.083940 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.084161 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.084326 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.084454 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.084652 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.188134 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.188204 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.188221 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.188250 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.188267 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.291582 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.291681 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.291700 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.291723 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.291743 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.301386 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/3.log" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.394750 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.394843 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.394861 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.394889 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.394910 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.497297 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.497362 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.497379 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.497403 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.497422 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.600285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.600346 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.600364 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.600394 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.600412 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.601138 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:38 crc kubenswrapper[4863]: E1205 06:47:38.601303 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.703752 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.703816 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.703832 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.703857 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.703875 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.806886 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.806992 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.807015 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.807038 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.807054 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.910463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.910568 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.910589 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.910615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:38 crc kubenswrapper[4863]: I1205 06:47:38.910632 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:38Z","lastTransitionTime":"2025-12-05T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.013643 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.013720 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.013747 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.013777 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.013800 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.116724 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.116788 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.116805 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.116828 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.116845 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.220838 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.220900 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.220918 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.220943 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.220960 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.322945 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.322995 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.323014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.323037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.323055 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.425715 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.425830 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.425843 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.426186 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.426278 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.529727 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.530020 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.530037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.530061 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.530079 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.600947 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.601019 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.600947 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:39 crc kubenswrapper[4863]: E1205 06:47:39.601168 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:39 crc kubenswrapper[4863]: E1205 06:47:39.601296 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:39 crc kubenswrapper[4863]: E1205 06:47:39.601457 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.633160 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.633216 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.633234 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.633256 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.633274 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.736712 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.738046 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.738254 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.738453 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.738675 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.841768 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.842068 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.842201 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.842338 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.842466 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.945418 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.945677 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.945851 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.945978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:39 crc kubenswrapper[4863]: I1205 06:47:39.946153 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:39Z","lastTransitionTime":"2025-12-05T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.049700 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.049758 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.049775 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.049798 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.049815 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.152210 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.152278 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.152306 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.152332 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.152349 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.255269 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.255345 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.255362 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.255387 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.255406 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.359013 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.359090 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.359117 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.359155 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.359182 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.462738 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.462803 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.462822 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.462846 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.462864 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.565631 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.565710 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.565733 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.565761 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.565778 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.601644 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:40 crc kubenswrapper[4863]: E1205 06:47:40.601818 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.668568 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.668647 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.668680 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.668709 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.668730 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.771827 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.771888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.771910 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.771938 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.771963 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.875011 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.875080 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.875103 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.875131 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.875149 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.978052 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.978117 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.978137 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.978163 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:40 crc kubenswrapper[4863]: I1205 06:47:40.978182 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:40Z","lastTransitionTime":"2025-12-05T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.080707 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.080760 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.080778 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.080802 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.080821 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.184223 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.184270 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.184287 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.184313 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.184329 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.287255 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.287318 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.287336 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.287358 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.287377 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.390300 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.390364 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.390388 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.390417 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.390439 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.493677 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.493743 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.493764 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.493792 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.493814 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.596865 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.596940 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.596964 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.596993 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.597015 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.601545 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.601651 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:41 crc kubenswrapper[4863]: E1205 06:47:41.601710 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.601743 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:41 crc kubenswrapper[4863]: E1205 06:47:41.602190 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:41 crc kubenswrapper[4863]: E1205 06:47:41.602506 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.621336 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.700841 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.700931 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.700955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.701005 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.701030 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.803766 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.803824 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.803841 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.803868 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.803885 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.907220 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.907288 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.907358 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.907388 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:41 crc kubenswrapper[4863]: I1205 06:47:41.907407 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:41Z","lastTransitionTime":"2025-12-05T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.010669 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.010722 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.010740 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.010764 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.010781 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.113199 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.113681 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.113921 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.114162 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.114372 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.218345 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.218467 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.218512 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.218537 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.218565 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.321461 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.321538 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.321555 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.321577 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.321593 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.425334 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.425382 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.425398 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.425420 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.425438 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.528353 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.528424 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.528448 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.528510 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.528535 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.601534 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:42 crc kubenswrapper[4863]: E1205 06:47:42.602158 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.619067 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.631802 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.631849 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.631864 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.631881 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.631894 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.649430 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d872911609750a3a1238486a7955c139af17639b5c0f0531a945f4ee29766791\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"message\\\":\\\"opping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:04.618387 6521 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.618538 6521 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:04.619183 6521 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 06:47:04.619230 6521 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 06:47:04.619238 6521 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 06:47:04.619263 6521 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 06:47:04.619296 6521 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 06:47:04.619306 6521 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 06:47:04.619323 6521 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 06:47:04.619329 6521 factory.go:656] Stopping watch factory\\\\nI1205 06:47:04.619331 6521 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 06:47:04.619342 6521 ovnkube.go:599] Stopped ovnkube\\\\nI1205 06:47:04.619339 6521 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 
06:47:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:36Z\\\",\\\"message\\\":\\\"b.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620159 6926 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620222 6926 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620257 6926 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 06:47:35.620423 6926 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620598 6926 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620836 6926 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620931 6926 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621070 6926 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621719 6926 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.665224 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0550e845-371c-48d4-a16d-dab40c42e36b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2aed5a46f944c7cf02ef5b14d140f1a620df805de2f3bd613aaed7882ac48b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.684907 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.707621 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.727523 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.734401 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.734839 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.735000 4863 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.735192 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.735363 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.746063 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.767290 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\
\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.788041 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.805691 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.826823 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf8
6\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.838394 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.838431 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.838455 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.838492 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.838505 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.846672 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.861621 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.875641 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.897563 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.908795 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 
06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.919606 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.941052 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.941080 4863 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.941089 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.941121 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.941131 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:42Z","lastTransitionTime":"2025-12-05T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.946235 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2d54629-747f-4d49-8770-fa72b7c92580\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55151f6865e758a63876b64c3c06d1b05333826e68dfc1e9d9a4e54711edd14d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb29c89aa50dfb3017e82df15b78a424dd0effd55cdff9bf098ce6406147f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7c0caacf023bdb67ff30ecbac2a9b09b5e42040946f5d6221aee23351d9b8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f783473338bf6df0069d590ca882361192cb61febb25c0a3924d59a1837157d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a95fa1429b4d9be6b72b49ec46b8f53eeb53345ca9756c97c25f438f41b26fbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:42 crc kubenswrapper[4863]: I1205 06:47:42.964140 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:42Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.044084 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.044393 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.044589 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.044726 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.044859 4863 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.147283 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.147337 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.147354 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.147379 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.147398 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.255701 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.256120 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.256285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.256456 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.256683 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.359783 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.359846 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.359864 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.359888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.359907 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.462627 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.462680 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.462697 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.462723 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.462740 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.565999 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.566056 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.566073 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.566096 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.566113 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.601044 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.601150 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:43 crc kubenswrapper[4863]: E1205 06:47:43.601210 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.601291 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:43 crc kubenswrapper[4863]: E1205 06:47:43.601343 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:43 crc kubenswrapper[4863]: E1205 06:47:43.601518 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.669627 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.669683 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.669701 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.669727 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.669743 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.771944 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.771994 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.772010 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.772029 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.772045 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.874847 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.874918 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.874937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.874961 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.874981 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.978069 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.978127 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.978146 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.978169 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:43 crc kubenswrapper[4863]: I1205 06:47:43.978185 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:43Z","lastTransitionTime":"2025-12-05T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.081256 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.081333 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.081358 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.081393 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.081421 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.184459 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.184557 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.184583 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.184616 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.184640 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.287763 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.287822 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.287840 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.287864 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.287881 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.390906 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.391219 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.391359 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.391533 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.391695 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.494459 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.494533 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.494549 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.494571 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.494588 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.597324 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.597412 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.597437 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.597505 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.597537 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.601692 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:44 crc kubenswrapper[4863]: E1205 06:47:44.601815 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.700570 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.700618 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.700635 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.700657 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.700675 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.803716 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.803752 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.803766 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.803782 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.803791 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.906724 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.906786 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.906811 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.906841 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:44 crc kubenswrapper[4863]: I1205 06:47:44.906864 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:44Z","lastTransitionTime":"2025-12-05T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.010054 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.010116 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.010134 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.010160 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.010179 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.112206 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.112244 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.112255 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.112271 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.112282 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.215359 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.215419 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.215439 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.215463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.215506 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.319201 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.319237 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.319249 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.319267 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.319279 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.422356 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.422415 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.422434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.422463 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.422518 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.525515 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.525577 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.525594 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.525619 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.525635 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.601225 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.601324 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.601386 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.601517 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.601649 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.601772 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.628430 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.628535 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.628561 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.628584 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.628601 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.731904 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.731975 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.731992 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.732018 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.732035 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.823761 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.823825 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.823842 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.823868 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.823886 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.846288 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.854100 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.854148 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.854168 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.854196 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.854218 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.873277 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.878182 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.878260 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.878284 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.878315 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.878340 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.900571 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.905407 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.905529 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.905554 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.905585 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.905608 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.926371 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.932003 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.932048 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.932063 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.932087 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.932104 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.951656 4863 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f49cfa4-de7c-4c8e-a3ce-17df066aabb4\\\",\\\"systemUUID\\\":\\\"dc36ee67-6abe-4278-a52c-0ad78d90ac7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:45Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:45 crc kubenswrapper[4863]: E1205 06:47:45.952097 4863 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.954047 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.954103 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.954122 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.954150 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:45 crc kubenswrapper[4863]: I1205 06:47:45.954174 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:45Z","lastTransitionTime":"2025-12-05T06:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.057733 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.057787 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.057805 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.057829 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.057847 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.160448 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.160752 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.160845 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.160942 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.161043 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.263775 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.263937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.263966 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.264049 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.264123 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.367134 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.367192 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.367211 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.367236 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.367253 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.470423 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.470547 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.470569 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.470600 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.470624 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.573447 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.573520 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.573537 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.573559 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.573576 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.601621 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:46 crc kubenswrapper[4863]: E1205 06:47:46.601770 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.676200 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.676542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.676902 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.677093 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.677239 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.779938 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.779994 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.780014 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.780040 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.780060 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.888287 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.888422 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.888449 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.888519 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.888542 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.991643 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.991711 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.991727 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.991750 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:46 crc kubenswrapper[4863]: I1205 06:47:46.991769 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:46Z","lastTransitionTime":"2025-12-05T06:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.094943 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.095004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.095021 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.095046 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.095065 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.198617 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.198725 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.198748 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.198773 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.198792 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.301037 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.301153 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.301171 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.301193 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.301208 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.403595 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.403658 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.403675 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.403702 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.403719 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.506262 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.506306 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.506315 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.506333 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.506345 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.600879 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.600918 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.601001 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:47 crc kubenswrapper[4863]: E1205 06:47:47.601167 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:47 crc kubenswrapper[4863]: E1205 06:47:47.601353 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:47 crc kubenswrapper[4863]: E1205 06:47:47.601453 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.609449 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.609511 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.609523 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.609545 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.609558 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.711827 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.711888 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.711906 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.711930 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.711947 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.814614 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.814679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.814696 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.814721 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.814739 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.917718 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.917791 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.917815 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.917842 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:47 crc kubenswrapper[4863]: I1205 06:47:47.917858 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:47Z","lastTransitionTime":"2025-12-05T06:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.031191 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.031242 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.031259 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.031282 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.031299 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.134713 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.134776 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.134793 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.134816 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.134833 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.238119 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.238232 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.238252 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.238324 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.238344 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.343832 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.343903 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.343923 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.343959 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.343984 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.447404 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.447466 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.447513 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.447536 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.447554 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.550202 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.550256 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.550293 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.550318 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.550337 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.601950 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:48 crc kubenswrapper[4863]: E1205 06:47:48.602182 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.653283 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.653344 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.653360 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.653385 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.653406 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.756153 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.756214 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.756230 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.756255 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.756273 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.859608 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.859681 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.859706 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.859739 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.859760 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.963295 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.963348 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.963368 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.963389 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:48 crc kubenswrapper[4863]: I1205 06:47:48.963406 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:48Z","lastTransitionTime":"2025-12-05T06:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.065868 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.065935 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.065958 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.065988 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.066009 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.168882 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.168946 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.168969 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.169001 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.169027 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.272325 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.272395 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.272411 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.272443 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.272462 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.374701 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.374759 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.374775 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.374800 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.374818 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.477157 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.477227 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.477247 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.477272 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.477289 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.579915 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.579962 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.579978 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.579998 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.580015 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.601797 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:49 crc kubenswrapper[4863]: E1205 06:47:49.601957 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.602085 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.602085 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:49 crc kubenswrapper[4863]: E1205 06:47:49.602256 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:49 crc kubenswrapper[4863]: E1205 06:47:49.602324 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.683109 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.683156 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.683169 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.683188 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.683201 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.785230 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.785263 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.785274 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.785288 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.785300 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.888548 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.888622 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.888643 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.888666 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.888684 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.991609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.991666 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.991684 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.991708 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:49 crc kubenswrapper[4863]: I1205 06:47:49.991726 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:49Z","lastTransitionTime":"2025-12-05T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.094559 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.094609 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.094624 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.094646 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.094664 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.197060 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.197096 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.197107 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.197121 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.197131 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.299776 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.299823 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.299855 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.299878 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.299894 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.402203 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.402243 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.402252 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.402267 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.402277 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.505299 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.505363 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.505380 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.505406 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.505422 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.601410 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:50 crc kubenswrapper[4863]: E1205 06:47:50.601627 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.602665 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:47:50 crc kubenswrapper[4863]: E1205 06:47:50.602909 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.607615 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.607662 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.607679 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.607701 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.607717 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.617729 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"
resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.640137 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.653791 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.673165 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: 
I1205 06:47:50.688266 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.701378 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.710792 4863 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.710831 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.710867 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.710893 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.710909 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.738205 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2d54629-747f-4d49-8770-fa72b7c92580\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55151f6865e758a63876b64c3c06d1b05333826e68dfc1e9d9a4e54711edd14d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb29c89aa50dfb3017e82df15b78a424dd0effd55cdff9bf098ce6406147f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7c0caacf023bdb67ff30ecbac2a9b09b5e42040946f5d6221aee23351d9b8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f783473338bf6df0069d590ca882361192cb61febb25c0a3924d59a1837157d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a95fa1429b4d9be6b72b49ec46b8f53eeb53345ca9756c97c25f438f41b26fbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.767260 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.793200 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.812142 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.813461 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.813503 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.813513 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.813538 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.813546 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.828628 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0550e845-371c-48d4-a16d-dab40c42e36b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2aed5a46f944c7cf02ef5b14d140f1a620df805de2f3bd613aaed7882ac48b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.844846 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.857749 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.886978 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:36Z\\\",\\\"message\\\":\\\"b.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620159 6926 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620222 6926 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620257 6926 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 06:47:35.620423 6926 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620598 6926 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620836 6926 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620931 6926 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621070 6926 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621719 6926 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.904395 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.916525 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.916611 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.916662 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.916686 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.916703 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:50Z","lastTransitionTime":"2025-12-05T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.924314 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.942858 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.961332 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:50 crc kubenswrapper[4863]: I1205 06:47:50.980268 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:50Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.019712 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.019761 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.019777 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.019803 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.019821 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.122773 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.122829 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.122845 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.122869 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.122886 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.226060 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.226180 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.226198 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.226224 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.226241 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.328460 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.328537 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.328550 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.328572 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.328586 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.431753 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.431840 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.431864 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.431899 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.431932 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.535136 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.535203 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.535220 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.535246 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.535264 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.601031 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.601054 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:51 crc kubenswrapper[4863]: E1205 06:47:51.601368 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.601086 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:51 crc kubenswrapper[4863]: E1205 06:47:51.601504 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:51 crc kubenswrapper[4863]: E1205 06:47:51.601643 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.638662 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.638750 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.638774 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.638805 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.638827 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.742373 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.742421 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.742434 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.742452 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.742482 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.845726 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.845769 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.845781 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.845798 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.845810 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.948560 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.948624 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.948638 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.948656 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:51 crc kubenswrapper[4863]: I1205 06:47:51.948669 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:51Z","lastTransitionTime":"2025-12-05T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.052041 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.052097 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.052114 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.052136 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.052155 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.155285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.155351 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.155369 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.155395 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.155411 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.258614 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.258676 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.258694 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.258719 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.258736 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.361314 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.361387 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.361409 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.361441 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.361463 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.464288 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.464330 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.464340 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.464355 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.464366 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.566357 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.566426 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.566508 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.566533 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.566550 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.602744 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:52 crc kubenswrapper[4863]: E1205 06:47:52.603017 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.618775 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0550e845-371c-48d4-a16d-dab40c42e36b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2aed5a46f944c7cf02ef5b14d140f1a620df805de2f3bd613aaed7882ac48b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed068a96b7856f66d53d310952e0e54cf2169b130d20b64c9c14143e412b231\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 
06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.634179 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f0eb64d6bf209187ab757ffdcec1f1affdb73e19bbeaba43587b914e0832d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.643592 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9ctrk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c8fa48f-3360-4c3a-b4e8-958a84ee6a9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271e23db89f5350b0c8d4c5aecac8140585ac9687b88b5c6e08def5aacb06e2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vd42p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9ctrk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.669126 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.669164 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.669174 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.669190 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.669203 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.670451 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e873158-22c6-4eab-9cb1-438b0f50f46d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:36Z\\\",\\\"message\\\":\\\"b.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620159 6926 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620222 6926 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 06:47:35.620257 6926 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 06:47:35.620423 6926 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620598 6926 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620836 6926 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.620931 6926 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621070 6926 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 06:47:35.621719 6926 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:47:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q27nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xjcxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.689382 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"732c26ef-fae7-4af4-b953-7a5177dc6d33\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acc34f2db6413afc78cab69e56ee77d22df5c9aeed5c8f692e5ac8ef87915cb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33b99050af85415f799b738aa08c7e39a3e179c3aa0f1ddb4a66cdc8ba73dd0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d48bed7488606abddafc95f403289f4ce7ec10699fc55e9dcdf15e154b4106c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.707931 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.725862 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.745671 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://798731aa70a10b48dbe19415ea0952803696ca693d219ac6e0c4b4cab574e06e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4ffd8463256bf5c5ae9ba339c31dea29854a772b47e5b3c567b1ab7d778f21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.761953 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6dd012a-040f-4504-9866-21443f9165d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a0d113f1c0a2aeb85b0f993bf72525f7cacfaf089d55ae77e4033d2c47d45a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-58b62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnkj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.771633 4863 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.771973 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.772004 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.772089 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.772122 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.780429 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12ab2753-41b3-4f2e-ac07-abc8263ded25\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea2fcd9031902a631bbc0ded505fcbb141ed849a40a9682c6ea872d376931303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32f4ba7eb5245543493f60eb75f9338e2fb85f21c5d08a320c8823b6569b4b68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90854bdc967781f82c7a147788af27595202715af87fb2bfd9b54a43549b2b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96658d5fd7de8640f9200e47928a5a1052f1cc860945b64ce555e425cbb2e978\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.794319 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.806264 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tn7hx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4aafc06e-b11d-4ae3-af3c-f3597b19bc3a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da32af13eba011a0cb42bb6f814cb677ced9b084eb6c99587509ee2b0fbbd287\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9ct2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tn7hx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.818089 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-96nzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3943e053-ef4c-4348-98a8-cc1473a197f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kjgks\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-96nzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.846520 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2d54629-747f-4d49-8770-fa72b7c92580\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55151f6865e758a63876b64c3c06d1b05333826e68dfc1e9d9a4e54711edd14d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb29c89aa50dfb3017e82df15b78a424dd0effd55cdff9bf098ce6406147f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7c0caacf023bdb67ff30ecbac2a9b09b5e42040946f5d6221aee23351d9b8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f783473338bf6df0069d590ca882361192cb61febb25c0a3924d59a1837157d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a95fa1429b4d9be6b72b49ec46b8f53eeb53345ca9756c97c25f438f41b26fbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d79d499a1fe9154768ea7fa7e7176b6752bc18d32612018e4efbcbb9615c32af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a11e7b2eb6edad23fb658eb0bc32d29845aaf56d2bdd56409826bd3a016f6e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://082b9466465c072d6ab6aeeba83b115c76e537be6554ad339b665e030f238edf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.864543 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T06:46:30Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 06:46:25.004057 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 06:46:25.005303 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1272893953/tls.crt::/tmp/serving-cert-1272893953/tls.key\\\\\\\"\\\\nI1205 06:46:30.660560 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 06:46:30.669106 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 06:46:30.669139 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 06:46:30.669196 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 06:46:30.669215 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 06:46:30.678658 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 06:46:30.678702 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678714 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 06:46:30.678725 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 06:46:30.678733 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 06:46:30.678742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 06:46:30.678749 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 06:46:30.678824 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 06:46:30.682243 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:12Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.874975 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.875019 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.875031 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.875049 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.875065 4863 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.877809 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e350d92a7b0b417852b0977c3908af93f45aa027bb253ca3220f5599f111fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.894100 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vw8fd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9e2cdef-4a53-4f32-b973-e5d6ba0708db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:47:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T06:47:23Z\\\",\\\"message\\\":\\\"2025-12-05T06:46:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e\\\\n2025-12-05T06:46:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_efd2e41c-1481-4802-8584-294f0224425e to /host/opt/cni/bin/\\\\n2025-12-05T06:46:38Z [verbose] multus-daemon started\\\\n2025-12-05T06:46:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T06:47:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:47:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b66zm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vw8fd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.915372 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-whgzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c97ee70-4a26-46eb-9b38-0c53ff2189a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d1a8c5709cab4210162c8e1d4d6823e0ca8eb58d504117518944477446307e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7859af7f52e0bce9c9dab2e0b01f089a4471c47cc06257eb60e8bf7db3f09ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://520dc9ba47c17ad36d9891e29b50a5115eabb564f0b973895358aa2e5b00c498\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f0b35b19c1ad464c76e3443406f4d042a45c3763b52813c683417483017461\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6bbf5d297ab1a4f7a4e7d20f72d8d0ba9712753054f710ef8f23dde6b52fc60\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0364fc8c6452731d247d00e12bd71c928c6e5eaf5d00b21a49d569b105ddc28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://591004d9dc5b2a20c71cd2457bf747bd422b4e5497e30e67ae90966fa5ef24d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sccxh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-whgzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.926837 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0204ad42-ba52-4971-9c5a-ed9f949cb7e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T06:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bca874ebd5f0b09a7fcbd6e269de110ef67c5b09289090c21ac0de03694b4cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23b0041b200d9ef324f05db8897822878b61cabfa4b6ed8149b6f778c825dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hqn9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T06:46:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2xxk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T06:47:52Z is after 2025-08-24T17:21:41Z" Dec 05 
06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.977853 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.977910 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.977926 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.977950 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:52 crc kubenswrapper[4863]: I1205 06:47:52.977968 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:52Z","lastTransitionTime":"2025-12-05T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.080964 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.081023 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.081041 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.081064 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.081083 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.184139 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.184201 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.184221 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.184245 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.184263 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.287066 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.287115 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.287132 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.287155 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.287174 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.389652 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.389712 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.389729 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.389752 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.389773 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.492586 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.492645 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.492661 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.492683 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.492701 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.595662 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.595741 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.595760 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.595785 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.595803 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.601307 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.601363 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:53 crc kubenswrapper[4863]: E1205 06:47:53.601652 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.601718 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:53 crc kubenswrapper[4863]: E1205 06:47:53.602124 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:53 crc kubenswrapper[4863]: E1205 06:47:53.602329 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.699021 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.699112 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.699134 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.699159 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.699177 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.801981 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.802042 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.802111 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.802190 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.802217 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.904682 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.904731 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.904750 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.904772 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:53 crc kubenswrapper[4863]: I1205 06:47:53.904788 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:53Z","lastTransitionTime":"2025-12-05T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.007675 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.007731 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.007747 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.007770 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.007787 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.110855 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.110937 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.110960 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.110993 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.111021 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.214296 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.214366 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.214385 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.214413 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.214430 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.317466 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.317549 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.317565 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.317588 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.317604 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.420704 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.420761 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.420780 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.420805 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.420824 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.523423 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.523542 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.523567 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.523598 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.523621 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.602412 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:54 crc kubenswrapper[4863]: E1205 06:47:54.603070 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.626025 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.626245 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.626385 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.626546 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.626674 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.729890 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.729954 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.729971 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.729994 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.730012 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.833152 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.833672 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.833914 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.834112 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.834309 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.937522 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.937916 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.938183 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.938383 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:54 crc kubenswrapper[4863]: I1205 06:47:54.938625 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:54Z","lastTransitionTime":"2025-12-05T06:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.043187 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.043259 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.043277 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.043303 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.043325 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.096379 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:55 crc kubenswrapper[4863]: E1205 06:47:55.096943 4863 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:47:55 crc kubenswrapper[4863]: E1205 06:47:55.097180 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs podName:3943e053-ef4c-4348-98a8-cc1473a197f2 nodeName:}" failed. No retries permitted until 2025-12-05 06:48:59.097149078 +0000 UTC m=+166.823146158 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs") pod "network-metrics-daemon-96nzc" (UID: "3943e053-ef4c-4348-98a8-cc1473a197f2") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.146603 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.146936 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.147080 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.147236 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.147418 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.250556 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.250605 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.250621 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.250643 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.250659 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.359211 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.359299 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.359317 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.359345 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.359366 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.464189 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.464255 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.464276 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.464302 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.464319 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.567145 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.567228 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.567252 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.567285 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.567309 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.600905 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.600917 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.601045 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:55 crc kubenswrapper[4863]: E1205 06:47:55.601379 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:55 crc kubenswrapper[4863]: E1205 06:47:55.601536 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:55 crc kubenswrapper[4863]: E1205 06:47:55.601584 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.670226 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.670291 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.670309 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.670332 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.670350 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.773839 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.773895 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.773912 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.773934 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.773950 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.878890 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.878955 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.878974 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.879002 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.879019 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.982622 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.982733 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.982759 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.982790 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:55 crc kubenswrapper[4863]: I1205 06:47:55.982814 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:55Z","lastTransitionTime":"2025-12-05T06:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.046704 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.046775 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.046794 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.046821 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.046838 4863 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T06:47:56Z","lastTransitionTime":"2025-12-05T06:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.120911 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h"] Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.124291 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.131059 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.131868 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.131958 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.132317 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.155077 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=86.155049017 podStartE2EDuration="1m26.155049017s" podCreationTimestamp="2025-12-05 06:46:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.154913324 +0000 UTC m=+103.880910394" watchObservedRunningTime="2025-12-05 06:47:56.155049017 +0000 UTC m=+103.881046097" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.209810 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cc33cd8e-368a-4de3-a19e-2067f2bc7795-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.209905 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/cc33cd8e-368a-4de3-a19e-2067f2bc7795-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.209978 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/cc33cd8e-368a-4de3-a19e-2067f2bc7795-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.210066 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cc33cd8e-368a-4de3-a19e-2067f2bc7795-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.210117 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc33cd8e-368a-4de3-a19e-2067f2bc7795-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.246625 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podStartSLOduration=80.246565709 podStartE2EDuration="1m20.246565709s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.245933874 +0000 UTC m=+103.971930974" watchObservedRunningTime="2025-12-05 06:47:56.246565709 +0000 UTC m=+103.972562789" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.293951 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.293921287 podStartE2EDuration="52.293921287s" podCreationTimestamp="2025-12-05 06:47:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.271912206 +0000 UTC m=+103.997909256" watchObservedRunningTime="2025-12-05 06:47:56.293921287 +0000 UTC m=+104.019918367" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.304963 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-tn7hx" podStartSLOduration=80.30491102 podStartE2EDuration="1m20.30491102s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.304392589 +0000 UTC m=+104.030389639" watchObservedRunningTime="2025-12-05 06:47:56.30491102 +0000 UTC m=+104.030908100" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.310779 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc33cd8e-368a-4de3-a19e-2067f2bc7795-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.310842 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cc33cd8e-368a-4de3-a19e-2067f2bc7795-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.310862 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/cc33cd8e-368a-4de3-a19e-2067f2bc7795-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.310886 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/cc33cd8e-368a-4de3-a19e-2067f2bc7795-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.310920 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cc33cd8e-368a-4de3-a19e-2067f2bc7795-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.311835 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cc33cd8e-368a-4de3-a19e-2067f2bc7795-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.312523 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/cc33cd8e-368a-4de3-a19e-2067f2bc7795-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.312570 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/cc33cd8e-368a-4de3-a19e-2067f2bc7795-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.320652 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/cc33cd8e-368a-4de3-a19e-2067f2bc7795-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.338315 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cc33cd8e-368a-4de3-a19e-2067f2bc7795-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hfb8h\" (UID: \"cc33cd8e-368a-4de3-a19e-2067f2bc7795\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.389510 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=15.38945035 podStartE2EDuration="15.38945035s" podCreationTimestamp="2025-12-05 06:47:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.388550509 +0000 UTC m=+104.114547599" watchObservedRunningTime="2025-12-05 06:47:56.38945035 +0000 UTC m=+104.115447430" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.412294 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=86.412273689 podStartE2EDuration="1m26.412273689s" podCreationTimestamp="2025-12-05 06:46:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.412193537 +0000 UTC m=+104.138190617" watchObservedRunningTime="2025-12-05 06:47:56.412273689 +0000 UTC m=+104.138270739" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.450587 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.484812 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-whgzt" podStartSLOduration=80.4847945 podStartE2EDuration="1m20.4847945s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.48434583 +0000 UTC m=+104.210342910" watchObservedRunningTime="2025-12-05 06:47:56.4847945 +0000 UTC m=+104.210791540" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.485629 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vw8fd" podStartSLOduration=80.48561949 podStartE2EDuration="1m20.48561949s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.451614591 +0000 UTC m=+104.177611671" watchObservedRunningTime="2025-12-05 06:47:56.48561949 +0000 UTC m=+104.211616530" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.526829 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2xxk4" podStartSLOduration=80.526809104 podStartE2EDuration="1m20.526809104s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.506467872 +0000 UTC m=+104.232464952" watchObservedRunningTime="2025-12-05 06:47:56.526809104 +0000 UTC m=+104.252806154" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.526983 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=21.526978838 podStartE2EDuration="21.526978838s" podCreationTimestamp="2025-12-05 06:47:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.5266135 +0000 UTC m=+104.252610540" watchObservedRunningTime="2025-12-05 06:47:56.526978838 +0000 UTC m=+104.252975888" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.583867 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-9ctrk" podStartSLOduration=80.583843666 podStartE2EDuration="1m20.583843666s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:56.555825797 +0000 UTC m=+104.281822847" watchObservedRunningTime="2025-12-05 06:47:56.583843666 +0000 UTC m=+104.309840736" Dec 05 06:47:56 crc kubenswrapper[4863]: I1205 06:47:56.600874 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:56 crc kubenswrapper[4863]: E1205 06:47:56.600990 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:57 crc kubenswrapper[4863]: I1205 06:47:57.381643 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" event={"ID":"cc33cd8e-368a-4de3-a19e-2067f2bc7795","Type":"ContainerStarted","Data":"4a9b87233f1426dee22f567eb2b96401893a334e701e00c413b0b499f0e26a91"} Dec 05 06:47:57 crc kubenswrapper[4863]: I1205 06:47:57.381698 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" event={"ID":"cc33cd8e-368a-4de3-a19e-2067f2bc7795","Type":"ContainerStarted","Data":"2f12377242c825f05a887617eda6bf3969fc2175e3caa04f33b8efb0314197f5"} Dec 05 06:47:57 crc kubenswrapper[4863]: I1205 06:47:57.601301 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:57 crc kubenswrapper[4863]: I1205 06:47:57.601459 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:57 crc kubenswrapper[4863]: E1205 06:47:57.601887 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:57 crc kubenswrapper[4863]: I1205 06:47:57.601529 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:57 crc kubenswrapper[4863]: E1205 06:47:57.602124 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:47:57 crc kubenswrapper[4863]: E1205 06:47:57.602466 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:58 crc kubenswrapper[4863]: I1205 06:47:58.601713 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:47:58 crc kubenswrapper[4863]: E1205 06:47:58.602294 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:47:59 crc kubenswrapper[4863]: I1205 06:47:59.601257 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:47:59 crc kubenswrapper[4863]: I1205 06:47:59.601358 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:47:59 crc kubenswrapper[4863]: I1205 06:47:59.601269 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:47:59 crc kubenswrapper[4863]: E1205 06:47:59.601523 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:47:59 crc kubenswrapper[4863]: E1205 06:47:59.601735 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:47:59 crc kubenswrapper[4863]: E1205 06:47:59.601888 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:00 crc kubenswrapper[4863]: I1205 06:48:00.600961 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:00 crc kubenswrapper[4863]: E1205 06:48:00.601122 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:01 crc kubenswrapper[4863]: I1205 06:48:01.601007 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:01 crc kubenswrapper[4863]: I1205 06:48:01.601036 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:01 crc kubenswrapper[4863]: E1205 06:48:01.601192 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:01 crc kubenswrapper[4863]: I1205 06:48:01.601246 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:01 crc kubenswrapper[4863]: E1205 06:48:01.601454 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:01 crc kubenswrapper[4863]: E1205 06:48:01.601845 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:02 crc kubenswrapper[4863]: I1205 06:48:02.605008 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:02 crc kubenswrapper[4863]: E1205 06:48:02.605544 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:03 crc kubenswrapper[4863]: I1205 06:48:03.600996 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:03 crc kubenswrapper[4863]: I1205 06:48:03.601071 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:03 crc kubenswrapper[4863]: I1205 06:48:03.601103 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:03 crc kubenswrapper[4863]: E1205 06:48:03.601196 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:03 crc kubenswrapper[4863]: E1205 06:48:03.601309 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:03 crc kubenswrapper[4863]: E1205 06:48:03.601428 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:04 crc kubenswrapper[4863]: I1205 06:48:04.601882 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:04 crc kubenswrapper[4863]: E1205 06:48:04.602023 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:04 crc kubenswrapper[4863]: I1205 06:48:04.602855 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:48:04 crc kubenswrapper[4863]: E1205 06:48:04.603041 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:48:05 crc kubenswrapper[4863]: I1205 06:48:05.600964 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:05 crc kubenswrapper[4863]: I1205 06:48:05.601058 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:05 crc kubenswrapper[4863]: E1205 06:48:05.601086 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:05 crc kubenswrapper[4863]: I1205 06:48:05.601148 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:05 crc kubenswrapper[4863]: E1205 06:48:05.601292 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:05 crc kubenswrapper[4863]: E1205 06:48:05.601441 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:06 crc kubenswrapper[4863]: I1205 06:48:06.601680 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:06 crc kubenswrapper[4863]: E1205 06:48:06.602302 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:07 crc kubenswrapper[4863]: I1205 06:48:07.601634 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:07 crc kubenswrapper[4863]: I1205 06:48:07.601781 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:07 crc kubenswrapper[4863]: E1205 06:48:07.601959 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:07 crc kubenswrapper[4863]: I1205 06:48:07.602230 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:07 crc kubenswrapper[4863]: E1205 06:48:07.602388 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:07 crc kubenswrapper[4863]: E1205 06:48:07.602720 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:08 crc kubenswrapper[4863]: I1205 06:48:08.601255 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:08 crc kubenswrapper[4863]: E1205 06:48:08.601464 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.427594 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/1.log" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.429345 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/0.log" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.429441 4863 generic.go:334] "Generic (PLEG): container finished" podID="b9e2cdef-4a53-4f32-b973-e5d6ba0708db" containerID="8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c" exitCode=1 Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.429513 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerDied","Data":"8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c"} Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.429565 4863 scope.go:117] "RemoveContainer" containerID="29b92c02227685f6593f1a4ebc6f41e98d0db6228f7bf02e5e15c59c2ee05fe4" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.430148 4863 scope.go:117] "RemoveContainer" containerID="8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c" Dec 05 06:48:09 crc kubenswrapper[4863]: E1205 06:48:09.430387 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-vw8fd_openshift-multus(b9e2cdef-4a53-4f32-b973-e5d6ba0708db)\"" pod="openshift-multus/multus-vw8fd" podUID="b9e2cdef-4a53-4f32-b973-e5d6ba0708db" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.461453 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hfb8h" podStartSLOduration=93.461397388 podStartE2EDuration="1m33.461397388s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:47:57.399606213 +0000 UTC m=+105.125603293" watchObservedRunningTime="2025-12-05 
06:48:09.461397388 +0000 UTC m=+117.187394458" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.601615 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.601651 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:09 crc kubenswrapper[4863]: I1205 06:48:09.601827 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:09 crc kubenswrapper[4863]: E1205 06:48:09.602091 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:09 crc kubenswrapper[4863]: E1205 06:48:09.602421 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:09 crc kubenswrapper[4863]: E1205 06:48:09.602693 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:10 crc kubenswrapper[4863]: I1205 06:48:10.436365 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/1.log" Dec 05 06:48:10 crc kubenswrapper[4863]: I1205 06:48:10.600989 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:10 crc kubenswrapper[4863]: E1205 06:48:10.601172 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:11 crc kubenswrapper[4863]: I1205 06:48:11.601464 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:11 crc kubenswrapper[4863]: I1205 06:48:11.601624 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:11 crc kubenswrapper[4863]: E1205 06:48:11.601692 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:11 crc kubenswrapper[4863]: I1205 06:48:11.601620 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:11 crc kubenswrapper[4863]: E1205 06:48:11.601821 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:11 crc kubenswrapper[4863]: E1205 06:48:11.601923 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:12 crc kubenswrapper[4863]: E1205 06:48:12.535973 4863 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 06:48:12 crc kubenswrapper[4863]: I1205 06:48:12.601940 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:12 crc kubenswrapper[4863]: E1205 06:48:12.603024 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:12 crc kubenswrapper[4863]: E1205 06:48:12.686870 4863 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:48:13 crc kubenswrapper[4863]: I1205 06:48:13.601744 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:13 crc kubenswrapper[4863]: I1205 06:48:13.601803 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:13 crc kubenswrapper[4863]: E1205 06:48:13.601960 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:13 crc kubenswrapper[4863]: I1205 06:48:13.602324 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:13 crc kubenswrapper[4863]: E1205 06:48:13.602466 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:13 crc kubenswrapper[4863]: E1205 06:48:13.602766 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:14 crc kubenswrapper[4863]: I1205 06:48:14.601447 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:14 crc kubenswrapper[4863]: E1205 06:48:14.601717 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:15 crc kubenswrapper[4863]: I1205 06:48:15.601568 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:15 crc kubenswrapper[4863]: I1205 06:48:15.601698 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:15 crc kubenswrapper[4863]: E1205 06:48:15.602021 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:15 crc kubenswrapper[4863]: I1205 06:48:15.602070 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:15 crc kubenswrapper[4863]: E1205 06:48:15.602740 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:15 crc kubenswrapper[4863]: E1205 06:48:15.602890 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:15 crc kubenswrapper[4863]: I1205 06:48:15.603293 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:48:15 crc kubenswrapper[4863]: E1205 06:48:15.603623 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xjcxh_openshift-ovn-kubernetes(4e873158-22c6-4eab-9cb1-438b0f50f46d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" Dec 05 06:48:16 crc kubenswrapper[4863]: I1205 06:48:16.601166 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:16 crc kubenswrapper[4863]: E1205 06:48:16.601412 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:17 crc kubenswrapper[4863]: I1205 06:48:17.600884 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:17 crc kubenswrapper[4863]: I1205 06:48:17.600950 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:17 crc kubenswrapper[4863]: I1205 06:48:17.600955 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:17 crc kubenswrapper[4863]: E1205 06:48:17.601056 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:17 crc kubenswrapper[4863]: E1205 06:48:17.601148 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:17 crc kubenswrapper[4863]: E1205 06:48:17.601353 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:17 crc kubenswrapper[4863]: E1205 06:48:17.689153 4863 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:48:18 crc kubenswrapper[4863]: I1205 06:48:18.601665 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:18 crc kubenswrapper[4863]: E1205 06:48:18.601897 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:19 crc kubenswrapper[4863]: I1205 06:48:19.601499 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:19 crc kubenswrapper[4863]: E1205 06:48:19.601633 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:19 crc kubenswrapper[4863]: I1205 06:48:19.601464 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:19 crc kubenswrapper[4863]: E1205 06:48:19.601718 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:19 crc kubenswrapper[4863]: I1205 06:48:19.601507 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:19 crc kubenswrapper[4863]: E1205 06:48:19.601976 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:20 crc kubenswrapper[4863]: I1205 06:48:20.601424 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:20 crc kubenswrapper[4863]: E1205 06:48:20.602014 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:21 crc kubenswrapper[4863]: I1205 06:48:21.601701 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:21 crc kubenswrapper[4863]: I1205 06:48:21.601815 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:21 crc kubenswrapper[4863]: I1205 06:48:21.601860 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:21 crc kubenswrapper[4863]: E1205 06:48:21.602056 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:21 crc kubenswrapper[4863]: E1205 06:48:21.602228 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:21 crc kubenswrapper[4863]: E1205 06:48:21.602335 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:22 crc kubenswrapper[4863]: I1205 06:48:22.604134 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:22 crc kubenswrapper[4863]: I1205 06:48:22.604343 4863 scope.go:117] "RemoveContainer" containerID="8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c" Dec 05 06:48:22 crc kubenswrapper[4863]: E1205 06:48:22.604385 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:22 crc kubenswrapper[4863]: E1205 06:48:22.707399 4863 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:48:23 crc kubenswrapper[4863]: I1205 06:48:23.498069 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/1.log" Dec 05 06:48:23 crc kubenswrapper[4863]: I1205 06:48:23.498125 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerStarted","Data":"a106b5c99fac4aa7ecb61515b8604f83bec91b87b174aeca1a85ca2adb46c601"} Dec 05 06:48:23 crc kubenswrapper[4863]: I1205 06:48:23.601159 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:23 crc kubenswrapper[4863]: I1205 06:48:23.601208 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:23 crc kubenswrapper[4863]: E1205 06:48:23.602067 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:23 crc kubenswrapper[4863]: E1205 06:48:23.602790 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:23 crc kubenswrapper[4863]: I1205 06:48:23.604701 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:23 crc kubenswrapper[4863]: E1205 06:48:23.604998 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:24 crc kubenswrapper[4863]: I1205 06:48:24.600991 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:24 crc kubenswrapper[4863]: E1205 06:48:24.601233 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:25 crc kubenswrapper[4863]: I1205 06:48:25.601680 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:25 crc kubenswrapper[4863]: I1205 06:48:25.601844 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:25 crc kubenswrapper[4863]: E1205 06:48:25.601881 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:25 crc kubenswrapper[4863]: I1205 06:48:25.601708 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:25 crc kubenswrapper[4863]: E1205 06:48:25.602080 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:25 crc kubenswrapper[4863]: E1205 06:48:25.602290 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:26 crc kubenswrapper[4863]: I1205 06:48:26.602242 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:26 crc kubenswrapper[4863]: E1205 06:48:26.602548 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:27 crc kubenswrapper[4863]: I1205 06:48:27.600867 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:27 crc kubenswrapper[4863]: I1205 06:48:27.600878 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:27 crc kubenswrapper[4863]: E1205 06:48:27.601079 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:27 crc kubenswrapper[4863]: I1205 06:48:27.600890 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:27 crc kubenswrapper[4863]: E1205 06:48:27.601181 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:27 crc kubenswrapper[4863]: E1205 06:48:27.601310 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:27 crc kubenswrapper[4863]: E1205 06:48:27.709392 4863 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:48:28 crc kubenswrapper[4863]: I1205 06:48:28.602006 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:28 crc kubenswrapper[4863]: E1205 06:48:28.602221 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:29 crc kubenswrapper[4863]: I1205 06:48:29.601377 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:29 crc kubenswrapper[4863]: I1205 06:48:29.601517 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:29 crc kubenswrapper[4863]: E1205 06:48:29.601593 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:29 crc kubenswrapper[4863]: E1205 06:48:29.601715 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:29 crc kubenswrapper[4863]: I1205 06:48:29.601784 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:29 crc kubenswrapper[4863]: E1205 06:48:29.602030 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:30 crc kubenswrapper[4863]: I1205 06:48:30.601101 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:30 crc kubenswrapper[4863]: E1205 06:48:30.601376 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:30 crc kubenswrapper[4863]: I1205 06:48:30.602284 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.530763 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/3.log" Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.534310 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerStarted","Data":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.534840 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.601271 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.601341 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.601292 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:31 crc kubenswrapper[4863]: E1205 06:48:31.601508 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:31 crc kubenswrapper[4863]: E1205 06:48:31.601591 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:31 crc kubenswrapper[4863]: E1205 06:48:31.601778 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.787648 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podStartSLOduration=115.787626686 podStartE2EDuration="1m55.787626686s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:31.5799879 +0000 UTC m=+139.305984950" watchObservedRunningTime="2025-12-05 06:48:31.787626686 +0000 UTC m=+139.513623766" Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.788028 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-96nzc"] Dec 05 06:48:31 crc kubenswrapper[4863]: I1205 06:48:31.788126 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:31 crc kubenswrapper[4863]: E1205 06:48:31.788247 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:32 crc kubenswrapper[4863]: E1205 06:48:32.710519 4863 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:48:33 crc kubenswrapper[4863]: I1205 06:48:33.601413 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:33 crc kubenswrapper[4863]: I1205 06:48:33.601594 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:33 crc kubenswrapper[4863]: E1205 06:48:33.601712 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:33 crc kubenswrapper[4863]: I1205 06:48:33.601759 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:33 crc kubenswrapper[4863]: I1205 06:48:33.601790 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:33 crc kubenswrapper[4863]: E1205 06:48:33.601910 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:33 crc kubenswrapper[4863]: E1205 06:48:33.602139 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:33 crc kubenswrapper[4863]: E1205 06:48:33.602193 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:35 crc kubenswrapper[4863]: I1205 06:48:35.601714 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:35 crc kubenswrapper[4863]: I1205 06:48:35.601848 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:35 crc kubenswrapper[4863]: I1205 06:48:35.601744 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:35 crc kubenswrapper[4863]: I1205 06:48:35.601783 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:35 crc kubenswrapper[4863]: E1205 06:48:35.602338 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:35 crc kubenswrapper[4863]: E1205 06:48:35.602573 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:35 crc kubenswrapper[4863]: E1205 06:48:35.602626 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:35 crc kubenswrapper[4863]: E1205 06:48:35.602766 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:37 crc kubenswrapper[4863]: I1205 06:48:37.601332 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:37 crc kubenswrapper[4863]: I1205 06:48:37.601371 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:37 crc kubenswrapper[4863]: I1205 06:48:37.601693 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:37 crc kubenswrapper[4863]: I1205 06:48:37.601733 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:37 crc kubenswrapper[4863]: E1205 06:48:37.601848 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 06:48:37 crc kubenswrapper[4863]: E1205 06:48:37.602106 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-96nzc" podUID="3943e053-ef4c-4348-98a8-cc1473a197f2" Dec 05 06:48:37 crc kubenswrapper[4863]: E1205 06:48:37.602195 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 06:48:37 crc kubenswrapper[4863]: E1205 06:48:37.602298 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 06:48:38 crc kubenswrapper[4863]: I1205 06:48:38.463982 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:48:38 crc kubenswrapper[4863]: I1205 06:48:38.464070 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.602021 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.602712 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.602829 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.603076 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.605327 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.605857 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.607830 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.607885 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.607886 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 06:48:39 crc kubenswrapper[4863]: I1205 06:48:39.608249 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.216375 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:40 crc kubenswrapper[4863]: E1205 06:48:40.216655 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:50:42.216614108 +0000 UTC m=+269.942611188 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.216801 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.216877 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.216935 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.216978 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.227413 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.227866 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.228123 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 
06:48:40.242407 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.527080 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.563900 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:40 crc kubenswrapper[4863]: I1205 06:48:40.572161 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 06:48:40 crc kubenswrapper[4863]: W1205 06:48:40.859425 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-518565915a5cd9c7ac0526eaf09888bc7f1abef0d715e2fc55326233fe269f8a WatchSource:0}: Error finding container 518565915a5cd9c7ac0526eaf09888bc7f1abef0d715e2fc55326233fe269f8a: Status 404 returned error can't find the container with id 518565915a5cd9c7ac0526eaf09888bc7f1abef0d715e2fc55326233fe269f8a Dec 05 06:48:41 crc kubenswrapper[4863]: W1205 06:48:41.021324 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-da03eece31eee75213c9ec90df77285c333a7540ea203640487d875825436963 WatchSource:0}: Error finding container da03eece31eee75213c9ec90df77285c333a7540ea203640487d875825436963: Status 404 returned error can't find the container with id da03eece31eee75213c9ec90df77285c333a7540ea203640487d875825436963 Dec 05 06:48:41 crc kubenswrapper[4863]: I1205 06:48:41.573810 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e207e62b9ab7abf847023b24ae3e2e43d3efa48731210aafdec891ea559b9448"} Dec 05 06:48:41 crc kubenswrapper[4863]: I1205 06:48:41.574235 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"f13091624a242d5020d25fc1c9493fa69da5ac61550f2e8f02de3720912f9649"} Dec 05 06:48:41 crc kubenswrapper[4863]: I1205 06:48:41.576117 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b84556b2f57a6b9f6e69fec9e42f0f3fd1b75c2b88cbd315b04baf334405142b"} Dec 05 06:48:41 crc kubenswrapper[4863]: I1205 06:48:41.576169 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"518565915a5cd9c7ac0526eaf09888bc7f1abef0d715e2fc55326233fe269f8a"} Dec 05 06:48:41 crc kubenswrapper[4863]: I1205 06:48:41.578059 4863 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"51668571c55e30e1574e73a544a3d63da0ffc294b3bce0a00139163e962238fd"} Dec 05 06:48:41 crc kubenswrapper[4863]: I1205 06:48:41.578107 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"da03eece31eee75213c9ec90df77285c333a7540ea203640487d875825436963"} Dec 05 06:48:41 crc kubenswrapper[4863]: I1205 06:48:41.578336 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.295717 4863 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.370074 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pdkmd"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.381843 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.383154 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.383355 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xq987"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.383460 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.383804 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.384290 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.384798 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.384947 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xw4vb"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.385774 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.385927 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.386373 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.388639 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.397095 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.397512 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pdkmd"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.397621 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.397802 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.403011 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.403490 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.403742 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.404056 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.404301 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.404537 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.404609 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.404645 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.404979 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.405684 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.407050 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.407342 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.407507 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 
06:48:47.407684 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.407781 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.407387 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.407701 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.409086 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.409146 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.409257 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.410645 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.411702 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.411821 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.411920 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.411994 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.412065 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.412360 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.412388 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.412407 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.412440 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.412492 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.412490 4863 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.423678 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.424104 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.425920 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.426631 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.427026 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.430040 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.430733 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.430956 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.431169 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.431400 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.431660 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.431930 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.432058 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.432197 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.431935 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.432490 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.433669 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.434214 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.435380 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.435500 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.440566 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.441151 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.441996 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.442487 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.443771 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.447000 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.447433 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.447496 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.447537 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.447622 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.454295 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5wjml"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.454324 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.455640 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.455668 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.455688 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.463695 4863 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-dns-operator/dns-operator-744455d44c-f2b9m"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.464869 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.465115 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.465435 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-njvdd"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.466534 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.468020 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.468670 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.471730 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.472356 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.473131 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.473409 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.473558 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.474860 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.476890 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.477109 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.477237 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.477288 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.477359 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.477246 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.478667 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.485591 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.486947 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.487159 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.487542 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.488211 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-llph2"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.488559 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.488580 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.488668 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.489245 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.489279 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.489524 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.490446 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.491057 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t7c7l"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.491365 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.491719 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492338 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-config\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492373 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bbdv\" (UniqueName: \"kubernetes.io/projected/f9eb1c3d-a038-4b8a-a352-c906a1e53e94-kube-api-access-4bbdv\") pod \"multus-admission-controller-857f4d67dd-njvdd\" (UID: \"f9eb1c3d-a038-4b8a-a352-c906a1e53e94\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492393 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v84j7\" (UniqueName: \"kubernetes.io/projected/d4e818aa-ebd1-4fc8-bf0f-baa83e430e50-kube-api-access-v84j7\") pod \"control-plane-machine-set-operator-78cbb6b69f-tdck6\" (UID: \"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492409 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40cb74c2-0f3d-497b-b23d-97159cc1cb95-serving-cert\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492426 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sprfs\" (UniqueName: \"kubernetes.io/projected/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-kube-api-access-sprfs\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492441 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0170aa22-9c08-4a04-910b-00c852ebca97-auth-proxy-config\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492456 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-audit\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492487 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492505 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492520 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-client-ca\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492566 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9gzh\" (UniqueName: \"kubernetes.io/projected/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-kube-api-access-g9gzh\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492605 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drmjf\" (UniqueName: \"kubernetes.io/projected/9a775603-788c-43d8-92d9-b5383855ed57-kube-api-access-drmjf\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492637 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/494f22d4-5ac0-4975-86fa-86cc2b1b3306-serving-cert\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492674 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-etcd-serving-ca\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 
06:48:47.492726 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b78007fd-f257-499b-a02a-dd6cf90ef471-auth-proxy-config\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492750 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-etcd-client\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492775 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ac9a6e96-9e1f-4791-8956-04dff6809ccd-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nmq82\" (UID: \"ac9a6e96-9e1f-4791-8956-04dff6809ccd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492803 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2100bb3f-455d-4d24-a5c6-e0c818f5137d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492830 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-audit-policies\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492849 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492871 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g66bw\" (UniqueName: \"kubernetes.io/projected/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-kube-api-access-g66bw\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492907 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-serving-cert\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492928 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b78007fd-f257-499b-a02a-dd6cf90ef471-config\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492979 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-audit-dir\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.492996 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8npd6\" (UniqueName: \"kubernetes.io/projected/ac9a6e96-9e1f-4791-8956-04dff6809ccd-kube-api-access-8npd6\") pod \"cluster-samples-operator-665b6dd947-nmq82\" (UID: \"ac9a6e96-9e1f-4791-8956-04dff6809ccd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493012 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a775603-788c-43d8-92d9-b5383855ed57-config\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493063 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-service-ca-bundle\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493079 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2f7af7f-6dfe-440d-b35c-b44a516c2dd8-metrics-tls\") pod \"dns-operator-744455d44c-f2b9m\" (UID: \"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493096 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4e818aa-ebd1-4fc8-bf0f-baa83e430e50-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tdck6\" (UID: \"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493114 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-image-import-ca\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493130 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-encryption-config\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493146 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-tmpfs\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493161 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-etcd-client\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493178 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57s27\" (UniqueName: \"kubernetes.io/projected/0170aa22-9c08-4a04-910b-00c852ebca97-kube-api-access-57s27\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493196 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-serving-cert\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493212 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zmjk\" (UniqueName: \"kubernetes.io/projected/2100bb3f-455d-4d24-a5c6-e0c818f5137d-kube-api-access-7zmjk\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493227 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvzsf\" (UniqueName: \"kubernetes.io/projected/494f22d4-5ac0-4975-86fa-86cc2b1b3306-kube-api-access-rvzsf\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493244 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493259 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0170aa22-9c08-4a04-910b-00c852ebca97-images\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493276 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b78007fd-f257-499b-a02a-dd6cf90ef471-machine-approver-tls\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493291 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctg8p\" (UniqueName: \"kubernetes.io/projected/b78007fd-f257-499b-a02a-dd6cf90ef471-kube-api-access-ctg8p\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493306 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-encryption-config\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493322 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9a775603-788c-43d8-92d9-b5383855ed57-images\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493337 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7wsr\" (UniqueName: \"kubernetes.io/projected/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-kube-api-access-f7wsr\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493353 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svpqt\" (UniqueName: \"kubernetes.io/projected/40cb74c2-0f3d-497b-b23d-97159cc1cb95-kube-api-access-svpqt\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493371 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 
06:48:47.493386 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-config-volume\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493403 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-node-pullsecrets\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493417 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-config\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493432 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xgm7\" (UniqueName: \"kubernetes.io/projected/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-kube-api-access-8xgm7\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493447 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a775603-788c-43d8-92d9-b5383855ed57-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493463 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0170aa22-9c08-4a04-910b-00c852ebca97-proxy-tls\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493507 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p8r6\" (UniqueName: \"kubernetes.io/projected/f2f7af7f-6dfe-440d-b35c-b44a516c2dd8-kube-api-access-7p8r6\") pod \"dns-operator-744455d44c-f2b9m\" (UID: \"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493522 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/979cb49e-89dd-4019-ad1c-bae78a50d877-serving-cert\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493537 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-config\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493552 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-config\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493569 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-apiservice-cert\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493585 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-webhook-cert\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493599 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f9eb1c3d-a038-4b8a-a352-c906a1e53e94-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-njvdd\" (UID: \"f9eb1c3d-a038-4b8a-a352-c906a1e53e94\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493614 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2100bb3f-455d-4d24-a5c6-e0c818f5137d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493630 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-client-ca\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493648 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-config\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493662 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76nbm\" (UniqueName: \"kubernetes.io/projected/979cb49e-89dd-4019-ad1c-bae78a50d877-kube-api-access-76nbm\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493677 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-secret-volume\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493691 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-audit-dir\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493706 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.493847 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.498330 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.499267 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.499929 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.500226 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.501138 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.501513 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.501652 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.501791 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.501916 4863 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502042 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502150 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502357 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502457 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502547 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502627 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502690 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502780 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502796 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502902 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502947 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.502994 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.503117 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.503327 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.503697 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c878b"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.503931 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.503976 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.504006 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.504155 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.507767 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rlw7x"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.508221 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.508542 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.508714 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.512749 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.512880 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.513045 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.513265 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.516042 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.516521 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.516557 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.518052 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-m58b7"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.518658 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.520074 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.520793 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.521054 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-wscq8"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.522099 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xq987"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.522276 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.522834 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.523339 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.525103 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.528733 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rcrtg"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.529516 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.533294 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.535194 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-l447b"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.545088 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.546092 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7tgbj"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.547443 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7tgbj" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.548214 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.552176 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.552372 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.557179 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.559058 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.559275 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xw4vb"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.560984 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.562697 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.564764 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.569587 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.573172 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.575244 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.578956 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-dffgt"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.580016 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.580990 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.582083 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.584731 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-f2b9m"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.584793 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-njvdd"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.585204 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.586760 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.587800 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5wjml"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.587855 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t7c7l"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.589448 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-lx9lk"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.590733 
4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.590807 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.591200 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-kbwgz"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.592000 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.593245 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rcrtg"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594347 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2fmh\" (UniqueName: \"kubernetes.io/projected/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-kube-api-access-f2fmh\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594381 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594425 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9gzh\" (UniqueName: \"kubernetes.io/projected/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-kube-api-access-g9gzh\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594442 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drmjf\" (UniqueName: \"kubernetes.io/projected/9a775603-788c-43d8-92d9-b5383855ed57-kube-api-access-drmjf\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594461 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/494f22d4-5ac0-4975-86fa-86cc2b1b3306-serving-cert\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594517 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfw4s\" (UniqueName: \"kubernetes.io/projected/e4e03408-7343-4cb3-8cf0-a3b3237a1d9f-kube-api-access-gfw4s\") pod \"migrator-59844c95c7-8d6x4\" (UID: \"e4e03408-7343-4cb3-8cf0-a3b3237a1d9f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" Dec 05 
06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594541 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-etcd-serving-ca\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594559 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b78007fd-f257-499b-a02a-dd6cf90ef471-auth-proxy-config\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594575 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-etcd-client\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594591 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ac9a6e96-9e1f-4791-8956-04dff6809ccd-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nmq82\" (UID: \"ac9a6e96-9e1f-4791-8956-04dff6809ccd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594609 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2100bb3f-455d-4d24-a5c6-e0c818f5137d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594624 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-audit-policies\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594638 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594653 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g66bw\" (UniqueName: \"kubernetes.io/projected/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-kube-api-access-g66bw\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594672 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-serving-cert\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594689 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b78007fd-f257-499b-a02a-dd6cf90ef471-config\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594708 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2xsp\" (UniqueName: \"kubernetes.io/projected/e496e045-f40a-4735-b2fa-4212a4d8412d-kube-api-access-m2xsp\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594727 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594745 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-audit-dir\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594762 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8npd6\" (UniqueName: \"kubernetes.io/projected/ac9a6e96-9e1f-4791-8956-04dff6809ccd-kube-api-access-8npd6\") pod \"cluster-samples-operator-665b6dd947-nmq82\" (UID: \"ac9a6e96-9e1f-4791-8956-04dff6809ccd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594779 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a775603-788c-43d8-92d9-b5383855ed57-config\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594798 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-service-ca-bundle\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594831 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2f7af7f-6dfe-440d-b35c-b44a516c2dd8-metrics-tls\") pod 
\"dns-operator-744455d44c-f2b9m\" (UID: \"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594847 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k578z\" (UniqueName: \"kubernetes.io/projected/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-kube-api-access-k578z\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.594927 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4e818aa-ebd1-4fc8-bf0f-baa83e430e50-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tdck6\" (UID: \"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.595624 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b78007fd-f257-499b-a02a-dd6cf90ef471-config\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597305 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-image-import-ca\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597342 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-encryption-config\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597364 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-tmpfs\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597380 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-etcd-client\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597399 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57s27\" (UniqueName: \"kubernetes.io/projected/0170aa22-9c08-4a04-910b-00c852ebca97-kube-api-access-57s27\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc 
kubenswrapper[4863]: I1205 06:48:47.597416 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-serving-cert\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597437 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zmjk\" (UniqueName: \"kubernetes.io/projected/2100bb3f-455d-4d24-a5c6-e0c818f5137d-kube-api-access-7zmjk\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597456 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvzsf\" (UniqueName: \"kubernetes.io/projected/494f22d4-5ac0-4975-86fa-86cc2b1b3306-kube-api-access-rvzsf\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597488 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597506 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0170aa22-9c08-4a04-910b-00c852ebca97-images\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597523 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597540 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvrzg\" (UniqueName: \"kubernetes.io/projected/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-kube-api-access-fvrzg\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597558 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b78007fd-f257-499b-a02a-dd6cf90ef471-machine-approver-tls\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 
06:48:47.597576 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctg8p\" (UniqueName: \"kubernetes.io/projected/b78007fd-f257-499b-a02a-dd6cf90ef471-kube-api-access-ctg8p\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597594 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-encryption-config\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597612 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f41784ac-d179-4d00-9cde-21e8683daf48-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597631 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f27898ec-5484-4679-a44a-a06bcd9e9e4c-config\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597647 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e496e045-f40a-4735-b2fa-4212a4d8412d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597665 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f41784ac-d179-4d00-9cde-21e8683daf48-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597684 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9a775603-788c-43d8-92d9-b5383855ed57-images\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597702 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7wsr\" (UniqueName: \"kubernetes.io/projected/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-kube-api-access-f7wsr\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 
06:48:47.597719 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svpqt\" (UniqueName: \"kubernetes.io/projected/40cb74c2-0f3d-497b-b23d-97159cc1cb95-kube-api-access-svpqt\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597736 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxnmm\" (UniqueName: \"kubernetes.io/projected/9e75585a-25ca-4d16-b2ca-33c520e209e1-kube-api-access-hxnmm\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597754 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597771 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-config-volume\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597790 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-node-pullsecrets\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597807 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-config\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597848 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xgm7\" (UniqueName: \"kubernetes.io/projected/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-kube-api-access-8xgm7\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597881 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a775603-788c-43d8-92d9-b5383855ed57-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597893 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-audit-policies\") pod 
\"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597902 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0170aa22-9c08-4a04-910b-00c852ebca97-proxy-tls\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597959 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p8r6\" (UniqueName: \"kubernetes.io/projected/f2f7af7f-6dfe-440d-b35c-b44a516c2dd8-kube-api-access-7p8r6\") pod \"dns-operator-744455d44c-f2b9m\" (UID: \"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.597982 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/979cb49e-89dd-4019-ad1c-bae78a50d877-serving-cert\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598003 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-config\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598020 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-config\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598035 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-etcd-serving-ca\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598040 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-config\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598147 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc 
kubenswrapper[4863]: I1205 06:48:47.598169 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-oauth-serving-cert\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598211 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f41784ac-d179-4d00-9cde-21e8683daf48-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598234 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-apiservice-cert\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598252 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-webhook-cert\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598300 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f9eb1c3d-a038-4b8a-a352-c906a1e53e94-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-njvdd\" (UID: \"f9eb1c3d-a038-4b8a-a352-c906a1e53e94\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598318 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2100bb3f-455d-4d24-a5c6-e0c818f5137d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598327 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598424 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-serving-cert\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598537 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598564 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-client-ca\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598674 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a775603-788c-43d8-92d9-b5383855ed57-config\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598756 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-config\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598779 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76nbm\" (UniqueName: \"kubernetes.io/projected/979cb49e-89dd-4019-ad1c-bae78a50d877-kube-api-access-76nbm\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598915 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2100bb3f-455d-4d24-a5c6-e0c818f5137d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598930 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-audit-dir\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.598001 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-service-ca-bundle\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.599160 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b78007fd-f257-499b-a02a-dd6cf90ef471-auth-proxy-config\") pod 
\"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.599201 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-secret-volume\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.600081 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.600550 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ac9a6e96-9e1f-4791-8956-04dff6809ccd-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nmq82\" (UID: \"ac9a6e96-9e1f-4791-8956-04dff6809ccd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.600612 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-tmpfs\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.601493 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-config\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.601863 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-config\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602296 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-audit-dir\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602341 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602372 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-trusted-ca\") pod 
\"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602402 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-serving-cert\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602428 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-oauth-config\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602603 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-service-ca\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602656 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e496e045-f40a-4735-b2fa-4212a4d8412d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602698 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-config\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602729 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk8fn\" (UniqueName: \"kubernetes.io/projected/f27898ec-5484-4679-a44a-a06bcd9e9e4c-kube-api-access-mk8fn\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602772 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bbdv\" (UniqueName: \"kubernetes.io/projected/f9eb1c3d-a038-4b8a-a352-c906a1e53e94-kube-api-access-4bbdv\") pod \"multus-admission-controller-857f4d67dd-njvdd\" (UID: \"f9eb1c3d-a038-4b8a-a352-c906a1e53e94\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602799 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-audit-dir\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602803 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v84j7\" (UniqueName: \"kubernetes.io/projected/d4e818aa-ebd1-4fc8-bf0f-baa83e430e50-kube-api-access-v84j7\") pod \"control-plane-machine-set-operator-78cbb6b69f-tdck6\" (UID: \"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f27898ec-5484-4679-a44a-a06bcd9e9e4c-serving-cert\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602870 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40cb74c2-0f3d-497b-b23d-97159cc1cb95-serving-cert\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602891 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sprfs\" (UniqueName: \"kubernetes.io/projected/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-kube-api-access-sprfs\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602899 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-node-pullsecrets\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602910 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0170aa22-9c08-4a04-910b-00c852ebca97-auth-proxy-config\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.602955 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-audit\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.603010 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.603034 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.603060 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-client-ca\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.603084 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-trusted-ca-bundle\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.604538 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-l447b"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.605781 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wscq8"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.606634 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-serving-cert\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.606932 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-etcd-client\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.607157 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-config\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.607217 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.607849 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/494f22d4-5ac0-4975-86fa-86cc2b1b3306-serving-cert\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.608408 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-llph2"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.608937 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f9eb1c3d-a038-4b8a-a352-c906a1e53e94-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-njvdd\" (UID: \"f9eb1c3d-a038-4b8a-a352-c906a1e53e94\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.609231 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-config\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.609622 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9a775603-788c-43d8-92d9-b5383855ed57-images\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.609771 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-encryption-config\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.610092 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/979cb49e-89dd-4019-ad1c-bae78a50d877-serving-cert\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.610187 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-client-ca\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.610296 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0170aa22-9c08-4a04-910b-00c852ebca97-proxy-tls\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.610382 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0170aa22-9c08-4a04-910b-00c852ebca97-images\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.610509 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0170aa22-9c08-4a04-910b-00c852ebca97-auth-proxy-config\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.610671 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40cb74c2-0f3d-497b-b23d-97159cc1cb95-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.611115 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-config-volume\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.611302 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4e818aa-ebd1-4fc8-bf0f-baa83e430e50-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tdck6\" (UID: \"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.611408 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.611647 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-client-ca\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.612050 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2f7af7f-6dfe-440d-b35c-b44a516c2dd8-metrics-tls\") pod \"dns-operator-744455d44c-f2b9m\" (UID: \"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.612603 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-secret-volume\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.612798 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613055 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613403 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-config\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613549 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a775603-788c-43d8-92d9-b5383855ed57-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613822 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-etcd-client\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613837 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40cb74c2-0f3d-497b-b23d-97159cc1cb95-serving-cert\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613843 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-apiservice-cert\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613853 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613871 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b78007fd-f257-499b-a02a-dd6cf90ef471-machine-approver-tls\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.613896 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-serving-cert\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.614006 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 
06:48:47.614256 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-image-import-ca\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.614331 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-audit\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.614640 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2100bb3f-455d-4d24-a5c6-e0c818f5137d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.614683 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.615075 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.615889 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-encryption-config\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.615974 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-webhook-cert\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.616384 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.617489 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.618743 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rlw7x"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.619770 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.620905 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kbwgz"] Dec 05 06:48:47 crc kubenswrapper[4863]: 
I1205 06:48:47.621929 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.623179 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.624282 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.626976 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.628277 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.637537 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7tgbj"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.639451 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.642096 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c878b"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.643339 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.644518 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dffgt"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.644836 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.645749 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7nt4n"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.647013 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7nt4n"] Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.647102 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.664403 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.685876 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704194 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704221 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvrzg\" (UniqueName: \"kubernetes.io/projected/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-kube-api-access-fvrzg\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704247 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f41784ac-d179-4d00-9cde-21e8683daf48-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704264 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f27898ec-5484-4679-a44a-a06bcd9e9e4c-config\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704283 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e496e045-f40a-4735-b2fa-4212a4d8412d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704300 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f41784ac-d179-4d00-9cde-21e8683daf48-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704328 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxnmm\" (UniqueName: \"kubernetes.io/projected/9e75585a-25ca-4d16-b2ca-33c520e209e1-kube-api-access-hxnmm\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " 
pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704360 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-config\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704378 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704395 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-oauth-serving-cert\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704410 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f41784ac-d179-4d00-9cde-21e8683daf48-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704428 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-serving-cert\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704444 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704482 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704500 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-serving-cert\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704516 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-oauth-config\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704532 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-service-ca\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704547 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e496e045-f40a-4735-b2fa-4212a4d8412d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704564 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk8fn\" (UniqueName: \"kubernetes.io/projected/f27898ec-5484-4679-a44a-a06bcd9e9e4c-kube-api-access-mk8fn\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704593 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f27898ec-5484-4679-a44a-a06bcd9e9e4c-serving-cert\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704618 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-trusted-ca-bundle\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704635 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2fmh\" (UniqueName: \"kubernetes.io/projected/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-kube-api-access-f2fmh\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704655 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704684 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfw4s\" (UniqueName: \"kubernetes.io/projected/e4e03408-7343-4cb3-8cf0-a3b3237a1d9f-kube-api-access-gfw4s\") pod 
\"migrator-59844c95c7-8d6x4\" (UID: \"e4e03408-7343-4cb3-8cf0-a3b3237a1d9f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704723 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2xsp\" (UniqueName: \"kubernetes.io/projected/e496e045-f40a-4735-b2fa-4212a4d8412d-kube-api-access-m2xsp\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704741 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704765 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k578z\" (UniqueName: \"kubernetes.io/projected/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-kube-api-access-k578z\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.704793 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.705119 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e496e045-f40a-4735-b2fa-4212a4d8412d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.705319 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.707445 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e496e045-f40a-4735-b2fa-4212a4d8412d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.725166 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.745822 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 06:48:47 crc 
kubenswrapper[4863]: I1205 06:48:47.765886 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.785339 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.804810 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.819187 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.828615 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.829972 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.844591 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.864756 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.884435 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.918070 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.925657 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.927102 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.946677 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.965265 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 06:48:47.986088 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 06:48:47 crc kubenswrapper[4863]: I1205 
06:48:47.999640 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f41784ac-d179-4d00-9cde-21e8683daf48-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.006143 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.025976 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.036175 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f41784ac-d179-4d00-9cde-21e8683daf48-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.046600 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.065810 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.085776 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.105383 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.125608 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.146524 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.166387 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.186058 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.205837 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.219045 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-serving-cert\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.226545 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.246324 4863 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.265958 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.285041 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.306533 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.320326 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-serving-cert\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.327101 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.339908 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-oauth-config\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.346133 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.366196 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.373639 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.377146 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-config\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.386661 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.397768 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-service-ca\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.416194 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.425909 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.426936 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-trusted-ca-bundle\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.436399 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-oauth-serving-cert\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.445467 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.466338 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.485717 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.506371 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.519759 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f27898ec-5484-4679-a44a-a06bcd9e9e4c-serving-cert\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.524400 4863 request.go:700] Waited for 1.000743462s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca-operator/configmaps?fieldSelector=metadata.name%3Dservice-ca-operator-config&limit=500&resourceVersion=0 Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.525961 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.535371 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f27898ec-5484-4679-a44a-a06bcd9e9e4c-config\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.546226 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.565638 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.586787 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.605307 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.626809 4863 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.645774 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.680612 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.685979 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: E1205 06:48:48.705516 4863 secret.go:188] Couldn't get secret openshift-image-registry/image-registry-operator-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 06:48:48 crc kubenswrapper[4863]: E1205 06:48:48.705653 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-image-registry-operator-tls podName:aa1c6030-fddf-4fe5-a6d2-55bdce963b06 nodeName:}" failed. No retries permitted until 2025-12-05 06:48:49.20561517 +0000 UTC m=+156.931612250 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "image-registry-operator-tls" (UniqueName: "kubernetes.io/secret/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-image-registry-operator-tls") pod "cluster-image-registry-operator-dc59b4c8b-lt45w" (UID: "aa1c6030-fddf-4fe5-a6d2-55bdce963b06") : failed to sync secret cache: timed out waiting for the condition Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.706580 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.728784 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.746514 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.766794 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.785959 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.822400 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.826131 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.845659 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.866364 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.886309 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.910879 
4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.945793 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.945843 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.966063 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 06:48:48 crc kubenswrapper[4863]: I1205 06:48:48.985911 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.010451 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.026604 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.045929 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.066649 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.085543 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.105564 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.126233 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.145949 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.166275 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.186013 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.206264 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.226147 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.232490 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.237750 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.247996 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.285706 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.306609 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.327132 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.345091 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.366695 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.386193 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.406920 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.450283 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9gzh\" (UniqueName: \"kubernetes.io/projected/f4acb57f-4cef-4cb5-aa9b-cdce33f8de60-kube-api-access-g9gzh\") pod \"apiserver-7bbb656c7d-v5f6h\" (UID: \"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.476579 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drmjf\" (UniqueName: \"kubernetes.io/projected/9a775603-788c-43d8-92d9-b5383855ed57-kube-api-access-drmjf\") pod \"machine-api-operator-5694c8668f-pdkmd\" (UID: \"9a775603-788c-43d8-92d9-b5383855ed57\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.484937 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g66bw\" (UniqueName: \"kubernetes.io/projected/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-kube-api-access-g66bw\") pod \"collect-profiles-29415285-phcfn\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.505408 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8npd6\" (UniqueName: \"kubernetes.io/projected/ac9a6e96-9e1f-4791-8956-04dff6809ccd-kube-api-access-8npd6\") pod \"cluster-samples-operator-665b6dd947-nmq82\" (UID: \"ac9a6e96-9e1f-4791-8956-04dff6809ccd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.525240 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.531696 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p8r6\" (UniqueName: \"kubernetes.io/projected/f2f7af7f-6dfe-440d-b35c-b44a516c2dd8-kube-api-access-7p8r6\") pod \"dns-operator-744455d44c-f2b9m\" (UID: \"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.535266 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.543435 4863 request.go:700] Waited for 1.942481032s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/serviceaccounts/machine-config-operator/token Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.563552 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctg8p\" (UniqueName: \"kubernetes.io/projected/b78007fd-f257-499b-a02a-dd6cf90ef471-kube-api-access-ctg8p\") pod \"machine-approver-56656f9798-mpht4\" (UID: \"b78007fd-f257-499b-a02a-dd6cf90ef471\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.566066 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57s27\" (UniqueName: \"kubernetes.io/projected/0170aa22-9c08-4a04-910b-00c852ebca97-kube-api-access-57s27\") pod \"machine-config-operator-74547568cd-26tcr\" (UID: \"0170aa22-9c08-4a04-910b-00c852ebca97\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.580060 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7wsr\" (UniqueName: \"kubernetes.io/projected/6eddcad1-79bc-4912-8bb4-7d6f5f018b64-kube-api-access-f7wsr\") pod \"openshift-apiserver-operator-796bbdcf4f-976gc\" (UID: \"6eddcad1-79bc-4912-8bb4-7d6f5f018b64\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.590483 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.609302 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zmjk\" (UniqueName: \"kubernetes.io/projected/2100bb3f-455d-4d24-a5c6-e0c818f5137d-kube-api-access-7zmjk\") pod \"openshift-controller-manager-operator-756b6f6bc6-l55kt\" (UID: \"2100bb3f-455d-4d24-a5c6-e0c818f5137d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.609439 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.639268 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svpqt\" (UniqueName: \"kubernetes.io/projected/40cb74c2-0f3d-497b-b23d-97159cc1cb95-kube-api-access-svpqt\") pod \"authentication-operator-69f744f599-5wjml\" (UID: \"40cb74c2-0f3d-497b-b23d-97159cc1cb95\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.655902 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.666867 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.668284 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76nbm\" (UniqueName: \"kubernetes.io/projected/979cb49e-89dd-4019-ad1c-bae78a50d877-kube-api-access-76nbm\") pod \"route-controller-manager-6576b87f9c-6rxcb\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.674082 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xgm7\" (UniqueName: \"kubernetes.io/projected/7b557ce2-a0bf-4bf8-b37d-89d424b68d14-kube-api-access-8xgm7\") pod \"apiserver-76f77b778f-xw4vb\" (UID: \"7b557ce2-a0bf-4bf8-b37d-89d424b68d14\") " pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.681108 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.683671 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v84j7\" (UniqueName: \"kubernetes.io/projected/d4e818aa-ebd1-4fc8-bf0f-baa83e430e50-kube-api-access-v84j7\") pod \"control-plane-machine-set-operator-78cbb6b69f-tdck6\" (UID: \"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.691269 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.699749 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.700946 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvzsf\" (UniqueName: \"kubernetes.io/projected/494f22d4-5ac0-4975-86fa-86cc2b1b3306-kube-api-access-rvzsf\") pod \"controller-manager-879f6c89f-xq987\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.726338 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sprfs\" (UniqueName: \"kubernetes.io/projected/4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5-kube-api-access-sprfs\") pod \"packageserver-d55dfcdfc-b2ln6\" (UID: \"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.739180 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h"] Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.740355 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bbdv\" (UniqueName: \"kubernetes.io/projected/f9eb1c3d-a038-4b8a-a352-c906a1e53e94-kube-api-access-4bbdv\") pod \"multus-admission-controller-857f4d67dd-njvdd\" (UID: \"f9eb1c3d-a038-4b8a-a352-c906a1e53e94\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.765879 4863 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.786327 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.798181 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pdkmd"] Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.809174 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.816821 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" Dec 05 06:48:49 crc kubenswrapper[4863]: W1205 06:48:49.821786 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a775603_788c_43d8_92d9_b5383855ed57.slice/crio-da99b8061ef686f103aa83cffd058d11c1476b66e858f37b2e451a312007aa7f WatchSource:0}: Error finding container da99b8061ef686f103aa83cffd058d11c1476b66e858f37b2e451a312007aa7f: Status 404 returned error can't find the container with id da99b8061ef686f103aa83cffd058d11c1476b66e858f37b2e451a312007aa7f Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.842624 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxnmm\" (UniqueName: \"kubernetes.io/projected/9e75585a-25ca-4d16-b2ca-33c520e209e1-kube-api-access-hxnmm\") pod \"console-f9d7485db-wscq8\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.846051 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.846117 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.854284 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82"] Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.855949 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.861099 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f41784ac-d179-4d00-9cde-21e8683daf48-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w9xlr\" (UID: \"f41784ac-d179-4d00-9cde-21e8683daf48\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.883266 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvrzg\" (UniqueName: \"kubernetes.io/projected/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-kube-api-access-fvrzg\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.887098 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.905030 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2fmh\" (UniqueName: \"kubernetes.io/projected/78143d95-87cc-4aa5-acd1-7ad8674dfbd9-kube-api-access-f2fmh\") pod \"openshift-config-operator-7777fb866f-mpxlw\" (UID: \"78143d95-87cc-4aa5-acd1-7ad8674dfbd9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.923028 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk8fn\" (UniqueName: \"kubernetes.io/projected/f27898ec-5484-4679-a44a-a06bcd9e9e4c-kube-api-access-mk8fn\") pod \"service-ca-operator-777779d784-fdvqk\" (UID: \"f27898ec-5484-4679-a44a-a06bcd9e9e4c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.924715 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.940629 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfw4s\" (UniqueName: \"kubernetes.io/projected/e4e03408-7343-4cb3-8cf0-a3b3237a1d9f-kube-api-access-gfw4s\") pod \"migrator-59844c95c7-8d6x4\" (UID: \"e4e03408-7343-4cb3-8cf0-a3b3237a1d9f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.947225 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.960342 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k578z\" (UniqueName: \"kubernetes.io/projected/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-kube-api-access-k578z\") pod \"marketplace-operator-79b997595-c878b\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.975033 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" Dec 05 06:48:49 crc kubenswrapper[4863]: I1205 06:48:49.986320 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aa1c6030-fddf-4fe5-a6d2-55bdce963b06-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lt45w\" (UID: \"aa1c6030-fddf-4fe5-a6d2-55bdce963b06\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.005008 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2xsp\" (UniqueName: \"kubernetes.io/projected/e496e045-f40a-4735-b2fa-4212a4d8412d-kube-api-access-m2xsp\") pod \"kube-storage-version-migrator-operator-b67b599dd-fbcwk\" (UID: \"e496e045-f40a-4735-b2fa-4212a4d8412d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.043852 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d383f927-13d0-4003-acff-216eb565846b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.043882 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhbf8\" (UniqueName: \"kubernetes.io/projected/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-kube-api-access-nhbf8\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.043902 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-srv-cert\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.043933 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-profile-collector-cert\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.043948 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a0db948e-d372-433d-8e87-26d82acaa96f-serving-cert\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044087 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-stats-auth\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") 
" pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044197 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzs84\" (UniqueName: \"kubernetes.io/projected/4db68aa4-711a-4795-ad76-64d3dc63e61b-kube-api-access-jzs84\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044220 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1b2516-a4dc-40ef-aec3-af80d706114c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044271 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044306 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044325 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044424 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d383f927-13d0-4003-acff-216eb565846b-trusted-ca\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044597 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a1b2516-a4dc-40ef-aec3-af80d706114c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044655 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j27l\" (UniqueName: 
\"kubernetes.io/projected/6f6037a0-cf83-47af-9425-bad91534bffb-kube-api-access-4j27l\") pod \"package-server-manager-789f6589d5-v22km\" (UID: \"6f6037a0-cf83-47af-9425-bad91534bffb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044688 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-bound-sa-token\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044710 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-trusted-ca\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044738 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-certificates\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044759 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/98bc47ef-be35-4fd0-8413-ccc346ada61c-signing-key\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044774 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl59n\" (UniqueName: \"kubernetes.io/projected/d383f927-13d0-4003-acff-216eb565846b-kube-api-access-jl59n\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044813 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044828 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aab23073-3d1c-4559-a876-f2e300029ad4-proxy-tls\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044847 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044864 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsqkf\" (UniqueName: \"kubernetes.io/projected/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-kube-api-access-dsqkf\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044880 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-service-ca\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044897 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f95688d7-dd9b-4c04-b696-42b8f54a2018-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044935 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.044964 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-ca\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045435 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045530 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045571 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h454z\" (UniqueName: \"kubernetes.io/projected/98bc47ef-be35-4fd0-8413-ccc346ada61c-kube-api-access-h454z\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045594 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4mx2\" (UniqueName: \"kubernetes.io/projected/58f537d3-f443-4553-a858-1b1455b98281-kube-api-access-x4mx2\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045625 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kllnm\" (UniqueName: \"kubernetes.io/projected/37ceeb88-98e7-4849-8ea3-8a1486d29314-kube-api-access-kllnm\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045641 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-metrics-certs\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045661 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-policies\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045684 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f95688d7-dd9b-4c04-b696-42b8f54a2018-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.045937 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58f537d3-f443-4553-a858-1b1455b98281-config-volume\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc 
kubenswrapper[4863]: I1205 06:48:50.046129 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/58f537d3-f443-4553-a858-1b1455b98281-metrics-tls\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046163 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a1b2516-a4dc-40ef-aec3-af80d706114c-config\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046180 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-default-certificate\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046209 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aab23073-3d1c-4559-a876-f2e300029ad4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046229 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-tls\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046285 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d383f927-13d0-4003-acff-216eb565846b-metrics-tls\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046447 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046547 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 
06:48:50.046940 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/37ceeb88-98e7-4849-8ea3-8a1486d29314-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046961 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046978 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxzcw\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-kube-api-access-rxzcw\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.046994 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-serving-cert\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047023 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047087 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/98bc47ef-be35-4fd0-8413-ccc346ada61c-signing-cabundle\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047106 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047159 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpgnd\" (UniqueName: \"kubernetes.io/projected/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-kube-api-access-fpgnd\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc 
kubenswrapper[4863]: I1205 06:48:50.047184 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f95688d7-dd9b-4c04-b696-42b8f54a2018-config\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047198 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8lbn\" (UniqueName: \"kubernetes.io/projected/a0db948e-d372-433d-8e87-26d82acaa96f-kube-api-access-s8lbn\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047226 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-service-ca-bundle\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047274 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-config\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047296 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047383 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-dir\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047412 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-client\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047447 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/37ceeb88-98e7-4849-8ea3-8a1486d29314-srv-cert\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.047671 4863 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:50.547628488 +0000 UTC m=+158.273625528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047696 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r76r4\" (UniqueName: \"kubernetes.io/projected/aab23073-3d1c-4559-a876-f2e300029ad4-kube-api-access-r76r4\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047755 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-config\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047774 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-trusted-ca\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047788 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx7jh\" (UniqueName: \"kubernetes.io/projected/648d25d7-c956-48d0-936b-053e5ae954be-kube-api-access-sx7jh\") pod \"downloads-7954f5f757-7tgbj\" (UID: \"648d25d7-c956-48d0-936b-053e5ae954be\") " pod="openshift-console/downloads-7954f5f757-7tgbj" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047803 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.047849 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f6037a0-cf83-47af-9425-bad91534bffb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-v22km\" (UID: \"6f6037a0-cf83-47af-9425-bad91534bffb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.078963 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.089921 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.123824 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.138180 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148321 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148570 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/37ceeb88-98e7-4849-8ea3-8a1486d29314-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148616 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxzcw\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-kube-api-access-rxzcw\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148658 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148712 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-serving-cert\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148763 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/98bc47ef-be35-4fd0-8413-ccc346ada61c-signing-cabundle\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148783 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148806 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpgnd\" (UniqueName: \"kubernetes.io/projected/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-kube-api-access-fpgnd\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.148830 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f95688d7-dd9b-4c04-b696-42b8f54a2018-config\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.149172 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:50.649146859 +0000 UTC m=+158.375143899 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.149867 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-service-ca-bundle\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.149894 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8lbn\" (UniqueName: \"kubernetes.io/projected/a0db948e-d372-433d-8e87-26d82acaa96f-kube-api-access-s8lbn\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.149926 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/871258f1-44d2-4a1b-934d-c20aaf2094c4-cert\") pod \"ingress-canary-kbwgz\" (UID: \"871258f1-44d2-4a1b-934d-c20aaf2094c4\") " pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.149955 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-config\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 
06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.149988 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150017 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-dir\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150048 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-client\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150082 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/37ceeb88-98e7-4849-8ea3-8a1486d29314-srv-cert\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150134 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r76r4\" (UniqueName: \"kubernetes.io/projected/aab23073-3d1c-4559-a876-f2e300029ad4-kube-api-access-r76r4\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150189 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-config\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.149736 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f95688d7-dd9b-4c04-b696-42b8f54a2018-config\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150230 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-registration-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150254 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-trusted-ca\") pod 
\"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150281 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx7jh\" (UniqueName: \"kubernetes.io/projected/648d25d7-c956-48d0-936b-053e5ae954be-kube-api-access-sx7jh\") pod \"downloads-7954f5f757-7tgbj\" (UID: \"648d25d7-c956-48d0-936b-053e5ae954be\") " pod="openshift-console/downloads-7954f5f757-7tgbj" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150306 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150329 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f6037a0-cf83-47af-9425-bad91534bffb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-v22km\" (UID: \"6f6037a0-cf83-47af-9425-bad91534bffb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150354 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d383f927-13d0-4003-acff-216eb565846b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150375 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhbf8\" (UniqueName: \"kubernetes.io/projected/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-kube-api-access-nhbf8\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150411 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-srv-cert\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150415 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150447 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-profile-collector-cert\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150490 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a0db948e-d372-433d-8e87-26d82acaa96f-serving-cert\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150535 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-socket-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150573 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzs84\" (UniqueName: \"kubernetes.io/projected/4db68aa4-711a-4795-ad76-64d3dc63e61b-kube-api-access-jzs84\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150595 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-stats-auth\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150633 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1b2516-a4dc-40ef-aec3-af80d706114c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150654 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150675 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150702 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150738 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d383f927-13d0-4003-acff-216eb565846b-trusted-ca\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150759 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a1b2516-a4dc-40ef-aec3-af80d706114c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150832 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j27l\" (UniqueName: \"kubernetes.io/projected/6f6037a0-cf83-47af-9425-bad91534bffb-kube-api-access-4j27l\") pod \"package-server-manager-789f6589d5-v22km\" (UID: \"6f6037a0-cf83-47af-9425-bad91534bffb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150867 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-bound-sa-token\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150898 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-trusted-ca\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150932 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-certificates\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150954 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/98bc47ef-be35-4fd0-8413-ccc346ada61c-signing-key\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150975 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl59n\" (UniqueName: \"kubernetes.io/projected/d383f927-13d0-4003-acff-216eb565846b-kube-api-access-jl59n\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.151003 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk4qs\" (UniqueName: 
\"kubernetes.io/projected/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-kube-api-access-hk4qs\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.151080 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.151090 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.151104 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aab23073-3d1c-4559-a876-f2e300029ad4-proxy-tls\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.151144 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.151215 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-service-ca-bundle\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.150084 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/98bc47ef-be35-4fd0-8413-ccc346ada61c-signing-cabundle\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.151301 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-dir\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.154847 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-trusted-ca\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.155058 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.154971 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-config\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.155637 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.156297 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.156493 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-client\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.156544 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f6037a0-cf83-47af-9425-bad91534bffb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-v22km\" (UID: \"6f6037a0-cf83-47af-9425-bad91534bffb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.156762 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/37ceeb88-98e7-4849-8ea3-8a1486d29314-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.157401 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-srv-cert\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.157444 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a0db948e-d372-433d-8e87-26d82acaa96f-serving-cert\") pod \"etcd-operator-b45778765-llph2\" (UID: 
\"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.157594 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-profile-collector-cert\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.157997 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d383f927-13d0-4003-acff-216eb565846b-trusted-ca\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.158914 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-certificates\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.158969 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.159101 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.159371 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-serving-cert\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.159447 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsqkf\" (UniqueName: \"kubernetes.io/projected/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-kube-api-access-dsqkf\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.159696 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-service-ca\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.159770 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f95688d7-dd9b-4c04-b696-42b8f54a2018-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160320 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-service-ca\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160420 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-certs\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160545 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160591 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bk5h\" (UniqueName: \"kubernetes.io/projected/871258f1-44d2-4a1b-934d-c20aaf2094c4-kube-api-access-2bk5h\") pod \"ingress-canary-kbwgz\" (UID: \"871258f1-44d2-4a1b-934d-c20aaf2094c4\") " pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160622 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-ca\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160647 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-node-bootstrap-token\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160669 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-plugins-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160690 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzwm7\" (UniqueName: \"kubernetes.io/projected/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-kube-api-access-zzwm7\") pod 
\"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160725 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160771 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h454z\" (UniqueName: \"kubernetes.io/projected/98bc47ef-be35-4fd0-8413-ccc346ada61c-kube-api-access-h454z\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160806 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4mx2\" (UniqueName: \"kubernetes.io/projected/58f537d3-f443-4553-a858-1b1455b98281-kube-api-access-x4mx2\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160887 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-policies\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160924 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kllnm\" (UniqueName: \"kubernetes.io/projected/37ceeb88-98e7-4849-8ea3-8a1486d29314-kube-api-access-kllnm\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.160959 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-metrics-certs\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161000 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f95688d7-dd9b-4c04-b696-42b8f54a2018-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161026 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58f537d3-f443-4553-a858-1b1455b98281-config-volume\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161075 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/6a1b2516-a4dc-40ef-aec3-af80d706114c-config\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161111 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-default-certificate\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161138 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/58f537d3-f443-4553-a858-1b1455b98281-metrics-tls\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161162 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-tls\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161179 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/37ceeb88-98e7-4849-8ea3-8a1486d29314-srv-cert\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161187 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aab23073-3d1c-4559-a876-f2e300029ad4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d383f927-13d0-4003-acff-216eb565846b-metrics-tls\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161414 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a0db948e-d372-433d-8e87-26d82acaa96f-etcd-ca\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161428 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.161891 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.162222 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aab23073-3d1c-4559-a876-f2e300029ad4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.162362 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58f537d3-f443-4553-a858-1b1455b98281-config-volume\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.162397 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-mountpoint-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.162416 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-csi-data-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.163675 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-trusted-ca\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.163869 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.164101 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.164133 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/6a1b2516-a4dc-40ef-aec3-af80d706114c-config\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.164399 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-policies\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.164578 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.164666 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aab23073-3d1c-4559-a876-f2e300029ad4-proxy-tls\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.165363 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-config\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.168145 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f95688d7-dd9b-4c04-b696-42b8f54a2018-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.173312 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-stats-auth\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.173752 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1b2516-a4dc-40ef-aec3-af80d706114c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.176581 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-l447b\" (UID: 
\"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.176829 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.183015 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.184056 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/58f537d3-f443-4553-a858-1b1455b98281-metrics-tls\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.202076 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpgnd\" (UniqueName: \"kubernetes.io/projected/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-kube-api-access-fpgnd\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.202825 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-default-certificate\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.204935 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d383f927-13d0-4003-acff-216eb565846b-metrics-tls\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.208517 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-tls\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.208556 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1-metrics-certs\") pod \"router-default-5444994796-m58b7\" (UID: \"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1\") " pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.209249 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/98bc47ef-be35-4fd0-8413-ccc346ada61c-signing-key\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.210965 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.220895 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxzcw\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-kube-api-access-rxzcw\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.237147 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.243496 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.245712 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5wjml"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.245758 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.259690 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8lbn\" (UniqueName: \"kubernetes.io/projected/a0db948e-d372-433d-8e87-26d82acaa96f-kube-api-access-s8lbn\") pod \"etcd-operator-b45778765-llph2\" (UID: \"a0db948e-d372-433d-8e87-26d82acaa96f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.262198 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d383f927-13d0-4003-acff-216eb565846b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269008 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-registration-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269112 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-socket-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 
06:48:50.269221 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk4qs\" (UniqueName: \"kubernetes.io/projected/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-kube-api-access-hk4qs\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269319 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-certs\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269394 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bk5h\" (UniqueName: \"kubernetes.io/projected/871258f1-44d2-4a1b-934d-c20aaf2094c4-kube-api-access-2bk5h\") pod \"ingress-canary-kbwgz\" (UID: \"871258f1-44d2-4a1b-934d-c20aaf2094c4\") " pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269448 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-registration-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269533 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-plugins-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269610 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzwm7\" (UniqueName: \"kubernetes.io/projected/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-kube-api-access-zzwm7\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269716 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-node-bootstrap-token\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269823 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-mountpoint-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269894 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-csi-data-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 
05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269978 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.270051 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/871258f1-44d2-4a1b-934d-c20aaf2094c4-cert\") pod \"ingress-canary-kbwgz\" (UID: \"871258f1-44d2-4a1b-934d-c20aaf2094c4\") " pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269927 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-mountpoint-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.270264 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:50.770252334 +0000 UTC m=+158.496249374 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.270064 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-csi-data-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269670 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-socket-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.269917 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-plugins-dir\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.274200 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-node-bootstrap-token\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " 
pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.274618 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/871258f1-44d2-4a1b-934d-c20aaf2094c4-cert\") pod \"ingress-canary-kbwgz\" (UID: \"871258f1-44d2-4a1b-934d-c20aaf2094c4\") " pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.275077 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-certs\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.287623 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx7jh\" (UniqueName: \"kubernetes.io/projected/648d25d7-c956-48d0-936b-053e5ae954be-kube-api-access-sx7jh\") pod \"downloads-7954f5f757-7tgbj\" (UID: \"648d25d7-c956-48d0-936b-053e5ae954be\") " pod="openshift-console/downloads-7954f5f757-7tgbj" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.312703 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r76r4\" (UniqueName: \"kubernetes.io/projected/aab23073-3d1c-4559-a876-f2e300029ad4-kube-api-access-r76r4\") pod \"machine-config-controller-84d6567774-96nlv\" (UID: \"aab23073-3d1c-4559-a876-f2e300029ad4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.314072 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-f2b9m"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.314153 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.328834 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhbf8\" (UniqueName: \"kubernetes.io/projected/0ea32fb0-42fe-4249-b4b6-a7784203b5dd-kube-api-access-nhbf8\") pod \"console-operator-58897d9998-rcrtg\" (UID: \"0ea32fb0-42fe-4249-b4b6-a7784203b5dd\") " pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.339137 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.346797 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl59n\" (UniqueName: \"kubernetes.io/projected/d383f927-13d0-4003-acff-216eb565846b-kube-api-access-jl59n\") pod \"ingress-operator-5b745b69d9-2gbd9\" (UID: \"d383f927-13d0-4003-acff-216eb565846b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.351815 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.370164 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.371264 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.385134 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:50.885096442 +0000 UTC m=+158.611093482 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.389423 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzs84\" (UniqueName: \"kubernetes.io/projected/4db68aa4-711a-4795-ad76-64d3dc63e61b-kube-api-access-jzs84\") pod \"oauth-openshift-558db77b4-l447b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.393166 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a1b2516-a4dc-40ef-aec3-af80d706114c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6g9rw\" (UID: \"6a1b2516-a4dc-40ef-aec3-af80d706114c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.404658 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-bound-sa-token\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.422074 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j27l\" (UniqueName: \"kubernetes.io/projected/6f6037a0-cf83-47af-9425-bad91534bffb-kube-api-access-4j27l\") pod \"package-server-manager-789f6589d5-v22km\" (UID: \"6f6037a0-cf83-47af-9425-bad91534bffb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.430920 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.441877 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsqkf\" (UniqueName: \"kubernetes.io/projected/c7adf0f4-d376-4e6d-85a7-4d301b0513fd-kube-api-access-dsqkf\") pod \"catalog-operator-68c6474976-tcpcd\" (UID: \"c7adf0f4-d376-4e6d-85a7-4d301b0513fd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.461797 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.469152 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kllnm\" (UniqueName: \"kubernetes.io/projected/37ceeb88-98e7-4849-8ea3-8a1486d29314-kube-api-access-kllnm\") pod \"olm-operator-6b444d44fb-m6jg2\" (UID: \"37ceeb88-98e7-4849-8ea3-8a1486d29314\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.484019 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.484307 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:50.984295451 +0000 UTC m=+158.710292491 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.503007 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.504249 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4mx2\" (UniqueName: \"kubernetes.io/projected/58f537d3-f443-4553-a858-1b1455b98281-kube-api-access-x4mx2\") pod \"dns-default-dffgt\" (UID: \"58f537d3-f443-4553-a858-1b1455b98281\") " pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.505589 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wscq8"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.507589 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h454z\" (UniqueName: \"kubernetes.io/projected/98bc47ef-be35-4fd0-8413-ccc346ada61c-kube-api-access-h454z\") pod \"service-ca-9c57cc56f-t7c7l\" (UID: \"98bc47ef-be35-4fd0-8413-ccc346ada61c\") " pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.509495 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7tgbj" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.513589 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xq987"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.522890 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f95688d7-dd9b-4c04-b696-42b8f54a2018-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wjlfz\" (UID: \"f95688d7-dd9b-4c04-b696-42b8f54a2018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.533248 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.547524 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.567829 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk4qs\" (UniqueName: \"kubernetes.io/projected/3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f-kube-api-access-hk4qs\") pod \"machine-config-server-lx9lk\" (UID: \"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f\") " pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.584929 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.585927 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.085912235 +0000 UTC m=+158.811909275 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.590179 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bk5h\" (UniqueName: \"kubernetes.io/projected/871258f1-44d2-4a1b-934d-c20aaf2094c4-kube-api-access-2bk5h\") pod \"ingress-canary-kbwgz\" (UID: \"871258f1-44d2-4a1b-934d-c20aaf2094c4\") " pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.601905 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzwm7\" (UniqueName: \"kubernetes.io/projected/bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52-kube-api-access-zzwm7\") pod \"csi-hostpathplugin-7nt4n\" (UID: \"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52\") " pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: W1205 06:48:50.613624 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod494f22d4_5ac0_4975_86fa_86cc2b1b3306.slice/crio-f6011392965391cb34bfa3d04af604d173fac70956b361303d7203371eae832e WatchSource:0}: Error finding container f6011392965391cb34bfa3d04af604d173fac70956b361303d7203371eae832e: Status 404 returned error can't find the container with id f6011392965391cb34bfa3d04af604d173fac70956b361303d7203371eae832e Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639303 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" event={"ID":"2100bb3f-455d-4d24-a5c6-e0c818f5137d","Type":"ContainerStarted","Data":"b8153307144f5d567483b003397e3494d085b7093062f93c48c57934c717005e"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639383 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639407 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" event={"ID":"2100bb3f-455d-4d24-a5c6-e0c818f5137d","Type":"ContainerStarted","Data":"7a5e6b178c475e3bd949543c734d7cf980aaa17dd0bd2474270c7560df1d4665"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639427 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" event={"ID":"9a775603-788c-43d8-92d9-b5383855ed57","Type":"ContainerStarted","Data":"f6bff47ce9f2c523b932c7ad926460b400e6166d7bd4dc7b940d8f63a723c5f3"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639448 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" event={"ID":"9a775603-788c-43d8-92d9-b5383855ed57","Type":"ContainerStarted","Data":"18f8af7f60e92be403b3ccb32d21058e3cb5849d88064c7dd44683a3a5a9cf19"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639465 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" event={"ID":"9a775603-788c-43d8-92d9-b5383855ed57","Type":"ContainerStarted","Data":"da99b8061ef686f103aa83cffd058d11c1476b66e858f37b2e451a312007aa7f"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639509 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" event={"ID":"ac9a6e96-9e1f-4791-8956-04dff6809ccd","Type":"ContainerStarted","Data":"4ddd2f6217a9cac7d3f42654052a903748ce4aff2caf5d638a8fecac1c9e40c1"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639527 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" event={"ID":"ac9a6e96-9e1f-4791-8956-04dff6809ccd","Type":"ContainerStarted","Data":"2c00cd09fbdaccd4ee86818bf5abdeb1b52e7117dedd228b269aa96d5a2a4a61"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639545 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" event={"ID":"ac9a6e96-9e1f-4791-8956-04dff6809ccd","Type":"ContainerStarted","Data":"85a8bb8c2c35eac058f88569c855ec172939f67cdab0551c2c013a1c55d934cb"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639562 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" event={"ID":"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8","Type":"ContainerStarted","Data":"06fbe978a25dea4b034f029998877afeb894e69c6246594e6936d614f7f8bad7"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639581 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" event={"ID":"0170aa22-9c08-4a04-910b-00c852ebca97","Type":"ContainerStarted","Data":"b03d8ab6065974f0594c66517ff508f2e5f3e34c619a48e5306bbece9c87a677"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639598 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" event={"ID":"b78007fd-f257-499b-a02a-dd6cf90ef471","Type":"ContainerStarted","Data":"493fdc4e4a6da6659293ed3fc9664f1f0616ee1de2e5742f0ffc45246e720953"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639618 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" event={"ID":"b78007fd-f257-499b-a02a-dd6cf90ef471","Type":"ContainerStarted","Data":"d8886b9e19cc167285b7ccfec503da050321a89de5709a7ec984d2926b074c57"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.639662 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" event={"ID":"b78007fd-f257-499b-a02a-dd6cf90ef471","Type":"ContainerStarted","Data":"3ca302bad0680ca78f3f3898b438d4f95d18eccde3eb2e1c4ef3337c62bc1256"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.643075 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.658512 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4acb57f-4cef-4cb5-aa9b-cdce33f8de60" containerID="38a4421378f261c754b48693f4a8a29eb8d7d5d51de254dbbab9132cc5d8859f" exitCode=0 Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.658590 4863 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" event={"ID":"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60","Type":"ContainerDied","Data":"38a4421378f261c754b48693f4a8a29eb8d7d5d51de254dbbab9132cc5d8859f"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.658617 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" event={"ID":"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60","Type":"ContainerStarted","Data":"cd8a6f64155fb2180676e9ff31502039e9d998821eb46112af22b9772adb2c98"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.664510 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.669332 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" event={"ID":"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e","Type":"ContainerStarted","Data":"8f3abe5f3e43ce3cf2934e6678cd54617b0512f044e18a2e7923bbe209fd0853"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.669368 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" event={"ID":"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e","Type":"ContainerStarted","Data":"53cb5c168e2a8adfcaed83cad612410533bffa1d06d1200b74c76c5c0359e707"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.672789 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" event={"ID":"40cb74c2-0f3d-497b-b23d-97159cc1cb95","Type":"ContainerStarted","Data":"badc9a173d6186a411d6250280b4a7a2f21d6710896d636afa78e2646361ba8a"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.672822 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" event={"ID":"40cb74c2-0f3d-497b-b23d-97159cc1cb95","Type":"ContainerStarted","Data":"ce10008e2882cc6ca4d985fa13bec47f43203b1e66c42fa1b3b2bca87919e980"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.687970 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" event={"ID":"494f22d4-5ac0-4975-86fa-86cc2b1b3306","Type":"ContainerStarted","Data":"f6011392965391cb34bfa3d04af604d173fac70956b361303d7203371eae832e"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.688089 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.688603 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.688852 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.188842344 +0000 UTC m=+158.914839384 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.695982 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.718734 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.727462 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xw4vb"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.738091 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wscq8" event={"ID":"9e75585a-25ca-4d16-b2ca-33c520e209e1","Type":"ContainerStarted","Data":"0756a416d256a80bf2aa3a481283a9dec115cbc794dfa39811c0bbdbf467c3fb"} Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.744530 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb"] Dec 05 06:48:50 crc kubenswrapper[4863]: W1205 06:48:50.744534 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f8aefb1_89f9_4ad4_83a0_bdaad8c955b5.slice/crio-807f1ecf34fe67257f1f54962649537df6fe4108ce34bfffdea990548595f728 WatchSource:0}: Error finding container 807f1ecf34fe67257f1f54962649537df6fe4108ce34bfffdea990548595f728: Status 404 returned error can't find the container with id 807f1ecf34fe67257f1f54962649537df6fe4108ce34bfffdea990548595f728 Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.768404 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.790303 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.792982 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.292956305 +0000 UTC m=+159.018953345 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.810240 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.814771 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.828509 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4"] Dec 05 06:48:50 crc kubenswrapper[4863]: W1205 06:48:50.849804 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod979cb49e_89dd_4019_ad1c_bae78a50d877.slice/crio-6eca68745f4c1fb76146187955540470daeea89cb7cc7a79cd321359b334d96c WatchSource:0}: Error finding container 6eca68745f4c1fb76146187955540470daeea89cb7cc7a79cd321359b334d96c: Status 404 returned error can't find the container with id 6eca68745f4c1fb76146187955540470daeea89cb7cc7a79cd321359b334d96c Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.851537 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-njvdd"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.851639 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lx9lk" Dec 05 06:48:50 crc kubenswrapper[4863]: W1205 06:48:50.853166 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6eddcad1_79bc_4912_8bb4_7d6f5f018b64.slice/crio-7aaea1b7d0a3f8a69852263029f4f9e9315063db8700454cf7048a2ac8af2d97 WatchSource:0}: Error finding container 7aaea1b7d0a3f8a69852263029f4f9e9315063db8700454cf7048a2ac8af2d97: Status 404 returned error can't find the container with id 7aaea1b7d0a3f8a69852263029f4f9e9315063db8700454cf7048a2ac8af2d97 Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.860695 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l55kt" podStartSLOduration=134.860664319 podStartE2EDuration="2m14.860664319s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:50.858980914 +0000 UTC m=+158.584977964" watchObservedRunningTime="2025-12-05 06:48:50.860664319 +0000 UTC m=+158.586661359" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.864748 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kbwgz" Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.876537 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" Dec 05 06:48:50 crc kubenswrapper[4863]: W1205 06:48:50.884076 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b557ce2_a0bf_4bf8_b37d_89d424b68d14.slice/crio-a01acb3742918fa0202101bb765b3b2dd5d709c05b4d183031f35a0b8b18b200 WatchSource:0}: Error finding container a01acb3742918fa0202101bb765b3b2dd5d709c05b4d183031f35a0b8b18b200: Status 404 returned error can't find the container with id a01acb3742918fa0202101bb765b3b2dd5d709c05b4d183031f35a0b8b18b200 Dec 05 06:48:50 crc kubenswrapper[4863]: W1205 06:48:50.888332 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4e03408_7343_4cb3_8cf0_a3b3237a1d9f.slice/crio-7e95a1136c80b4caf95f1d0f4b51da2a49e90a3b16d52b91399693c02f962b4b WatchSource:0}: Error finding container 7e95a1136c80b4caf95f1d0f4b51da2a49e90a3b16d52b91399693c02f962b4b: Status 404 returned error can't find the container with id 7e95a1136c80b4caf95f1d0f4b51da2a49e90a3b16d52b91399693c02f962b4b Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.891903 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:50 crc kubenswrapper[4863]: E1205 06:48:50.892432 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.39241943 +0000 UTC m=+159.118416470 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.977853 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.988955 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.990327 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk"] Dec 05 06:48:50 crc kubenswrapper[4863]: I1205 06:48:50.994628 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.019781 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c878b"] Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.021162 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.5211307 +0000 UTC m=+159.247127740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.043276 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.115081 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.115413 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.615396796 +0000 UTC m=+159.341393836 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: W1205 06:48:51.197617 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode496e045_f40a_4735_b2fa_4212a4d8412d.slice/crio-f6b4612a98e92ddf976e3818ce20a74ad99f02c8402acb848be6df4e754e3630 WatchSource:0}: Error finding container f6b4612a98e92ddf976e3818ce20a74ad99f02c8402acb848be6df4e754e3630: Status 404 returned error can't find the container with id f6b4612a98e92ddf976e3818ce20a74ad99f02c8402acb848be6df4e754e3630 Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.205510 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.207816 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-llph2"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.228274 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.228597 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.72858244 +0000 UTC m=+159.454579480 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.272336 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" podStartSLOduration=135.272315042 podStartE2EDuration="2m15.272315042s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:51.218831369 +0000 UTC m=+158.944828409" watchObservedRunningTime="2025-12-05 06:48:51.272315042 +0000 UTC m=+158.998312072" Dec 05 06:48:51 crc kubenswrapper[4863]: W1205 06:48:51.312697 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3533f6a9_2bcb_4570_8fbe_9cfc97ed4f0f.slice/crio-97697084781d10a0a3124a6c45fecdb47aa0997c542c337089373b08662fa0f4 WatchSource:0}: Error finding container 97697084781d10a0a3124a6c45fecdb47aa0997c542c337089373b08662fa0f4: Status 404 returned error can't find the container with id 97697084781d10a0a3124a6c45fecdb47aa0997c542c337089373b08662fa0f4 Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.330094 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.330392 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:51.830381218 +0000 UTC m=+159.556378258 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.340526 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9"] Dec 05 06:48:51 crc kubenswrapper[4863]: W1205 06:48:51.349292 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50db14aa_39fc_45f0_bccf_2fbe8e5e1ed4.slice/crio-ca45814397adb0dce36731067cdd3effc5b439905ae38a64690a02121afb7c2d WatchSource:0}: Error finding container ca45814397adb0dce36731067cdd3effc5b439905ae38a64690a02121afb7c2d: Status 404 returned error can't find the container with id ca45814397adb0dce36731067cdd3effc5b439905ae38a64690a02121afb7c2d Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.361868 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdkmd" podStartSLOduration=135.361845762 podStartE2EDuration="2m15.361845762s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:51.353790376 +0000 UTC m=+159.079787416" watchObservedRunningTime="2025-12-05 06:48:51.361845762 +0000 UTC m=+159.087842802" Dec 05 06:48:51 crc kubenswrapper[4863]: W1205 06:48:51.369154 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa1c6030_fddf_4fe5_a6d2_55bdce963b06.slice/crio-486f295184feb5b2a8eda83737a7cce0646ebb21ddf525f9290f1d72f31e5e27 WatchSource:0}: Error finding container 486f295184feb5b2a8eda83737a7cce0646ebb21ddf525f9290f1d72f31e5e27: Status 404 returned error can't find the container with id 486f295184feb5b2a8eda83737a7cce0646ebb21ddf525f9290f1d72f31e5e27 Dec 05 06:48:51 crc kubenswrapper[4863]: W1205 06:48:51.394514 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaab23073_3d1c_4559_a876_f2e300029ad4.slice/crio-838bc06d291bd5e8032a7d823bf44646f5d087a194c8b83902c031b25ccd4d0b WatchSource:0}: Error finding container 838bc06d291bd5e8032a7d823bf44646f5d087a194c8b83902c031b25ccd4d0b: Status 404 returned error can't find the container with id 838bc06d291bd5e8032a7d823bf44646f5d087a194c8b83902c031b25ccd4d0b Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.434990 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.435371 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 06:48:51.935355121 +0000 UTC m=+159.661352161 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.503686 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rcrtg"] Dec 05 06:48:51 crc kubenswrapper[4863]: W1205 06:48:51.514693 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd383f927_13d0_4003_acff_216eb565846b.slice/crio-1c2276e89c5c3fba8aecb5a022c9f1700c931c10223e859e346beebf25ddff02 WatchSource:0}: Error finding container 1c2276e89c5c3fba8aecb5a022c9f1700c931c10223e859e346beebf25ddff02: Status 404 returned error can't find the container with id 1c2276e89c5c3fba8aecb5a022c9f1700c931c10223e859e346beebf25ddff02 Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.537071 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.537429 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.037418137 +0000 UTC m=+159.763415177 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.537791 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.538178 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7tgbj"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.545754 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nmq82" podStartSLOduration=135.54574328 podStartE2EDuration="2m15.54574328s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:51.543549072 +0000 UTC m=+159.269546112" watchObservedRunningTime="2025-12-05 06:48:51.54574328 +0000 UTC m=+159.271740320" Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.617895 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-5wjml" podStartSLOduration=135.617879254 podStartE2EDuration="2m15.617879254s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:51.588965329 +0000 UTC m=+159.314962359" watchObservedRunningTime="2025-12-05 06:48:51.617879254 +0000 UTC m=+159.343876294" Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.644080 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.644340 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.144325523 +0000 UTC m=+159.870322563 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.668167 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dffgt"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.677398 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-l447b"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.745153 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.745441 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.245428943 +0000 UTC m=+159.971425983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.788852 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.859112 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" event={"ID":"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8","Type":"ContainerStarted","Data":"f6b78bc3038024f921a1b2042bc38bab55552a44f0632f048cb4ceb21cb3b069"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.872201 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.872645 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.372629622 +0000 UTC m=+160.098626662 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.879660 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t7c7l"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.892777 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" event={"ID":"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50","Type":"ContainerStarted","Data":"2992879b39ddf7ed4517e8e44dd6a23a2b83e72c2b3cbff5e5be02619f9601a4"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.892812 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" event={"ID":"d4e818aa-ebd1-4fc8-bf0f-baa83e430e50","Type":"ContainerStarted","Data":"b6351e642d74d53d305625ef095741a74834b93c2fe9e5e8c7a12f8f82fce0bf"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.900357 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" event={"ID":"7b557ce2-a0bf-4bf8-b37d-89d424b68d14","Type":"ContainerStarted","Data":"a01acb3742918fa0202101bb765b3b2dd5d709c05b4d183031f35a0b8b18b200"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.902298 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.903985 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" event={"ID":"aab23073-3d1c-4559-a876-f2e300029ad4","Type":"ContainerStarted","Data":"838bc06d291bd5e8032a7d823bf44646f5d087a194c8b83902c031b25ccd4d0b"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.904061 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kbwgz"] Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.939895 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" event={"ID":"e4e03408-7343-4cb3-8cf0-a3b3237a1d9f","Type":"ContainerStarted","Data":"839724d54d7d8e44967c5f712dd6e74f79d4b4ec6a70028c582bee382ad34ba9"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.939936 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" event={"ID":"e4e03408-7343-4cb3-8cf0-a3b3237a1d9f","Type":"ContainerStarted","Data":"7e95a1136c80b4caf95f1d0f4b51da2a49e90a3b16d52b91399693c02f962b4b"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.950372 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" event={"ID":"a0db948e-d372-433d-8e87-26d82acaa96f","Type":"ContainerStarted","Data":"50e1b85b657e6fc851076aba4044fd8337727e119137c17b939ab888c346b9df"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.960782 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" event={"ID":"494f22d4-5ac0-4975-86fa-86cc2b1b3306","Type":"ContainerStarted","Data":"bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.961621 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.967872 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.969967 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wscq8" event={"ID":"9e75585a-25ca-4d16-b2ca-33c520e209e1","Type":"ContainerStarted","Data":"739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.976033 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:51 crc kubenswrapper[4863]: E1205 06:48:51.977383 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.477368248 +0000 UTC m=+160.203365278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.988091 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" event={"ID":"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5","Type":"ContainerStarted","Data":"67221aa089e1514deadafffb10c483b6817e4db8925bda23587071f62e1dffaa"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.988146 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" event={"ID":"4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5","Type":"ContainerStarted","Data":"807f1ecf34fe67257f1f54962649537df6fe4108ce34bfffdea990548595f728"} Dec 05 06:48:51 crc kubenswrapper[4863]: I1205 06:48:51.989208 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.003999 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" event={"ID":"0170aa22-9c08-4a04-910b-00c852ebca97","Type":"ContainerStarted","Data":"e730a5d5e8704ccbe3b4fa82f483fb9e421dba0a4366269b25b213faa7ba1605"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.033839 4863 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7tgbj" event={"ID":"648d25d7-c956-48d0-936b-053e5ae954be","Type":"ContainerStarted","Data":"f225dcc28d4b4d627e2cb696912cd01d1c36b5c30b96717b08555e20c65fb7f2"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.049072 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" event={"ID":"6a1b2516-a4dc-40ef-aec3-af80d706114c","Type":"ContainerStarted","Data":"dd047f05245257b4c1872162b454c73825f5b8ede79738de7b4eb6df9fa64619"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.069166 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz"] Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.077587 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.077788 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.577757289 +0000 UTC m=+160.303754329 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.077933 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.079302 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.57928654 +0000 UTC m=+160.305283580 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.102366 4863 generic.go:334] "Generic (PLEG): container finished" podID="78143d95-87cc-4aa5-acd1-7ad8674dfbd9" containerID="54e855088a69716ddf6a36cdd97c5ad416e3d58c4c4e45495bc03009f24be9a9" exitCode=0 Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.102446 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" event={"ID":"78143d95-87cc-4aa5-acd1-7ad8674dfbd9","Type":"ContainerDied","Data":"54e855088a69716ddf6a36cdd97c5ad416e3d58c4c4e45495bc03009f24be9a9"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.102497 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" event={"ID":"78143d95-87cc-4aa5-acd1-7ad8674dfbd9","Type":"ContainerStarted","Data":"ff586d0ece35417ae1532df17be2b7d111ef47c86a95552d6a9ff96c3d7c3717"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.134797 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpht4" podStartSLOduration=136.134782178 podStartE2EDuration="2m16.134782178s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.133228766 +0000 UTC m=+159.859225806" watchObservedRunningTime="2025-12-05 06:48:52.134782178 +0000 UTC m=+159.860779218" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.141680 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7nt4n"] Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.145666 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" event={"ID":"f4acb57f-4cef-4cb5-aa9b-cdce33f8de60","Type":"ContainerStarted","Data":"816951d94f108f2c3f29e5a44d1883f9c439b230a1aa597794f381708b3bc18b"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.146838 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" event={"ID":"f41784ac-d179-4d00-9cde-21e8683daf48","Type":"ContainerStarted","Data":"7ee46ac0b754687750f1964c7cd1213f1cb97273847b08a302614862f1f3e133"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.147784 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2"] Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.148427 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-m58b7" event={"ID":"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1","Type":"ContainerStarted","Data":"319e294ee8c3e22c4e2548b2bd044f281a9199e6069eea043d00b39ad9fa7a38"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.148449 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-m58b7" 
event={"ID":"e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1","Type":"ContainerStarted","Data":"a1df3f867245c7d78a1d696bbbe3c36fb58ef80dc0dcbd589fef5fac32bf3a0f"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.165437 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" event={"ID":"979cb49e-89dd-4019-ad1c-bae78a50d877","Type":"ContainerStarted","Data":"00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.173233 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.173292 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" event={"ID":"979cb49e-89dd-4019-ad1c-bae78a50d877","Type":"ContainerStarted","Data":"6eca68745f4c1fb76146187955540470daeea89cb7cc7a79cd321359b334d96c"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.173312 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rcrtg" event={"ID":"0ea32fb0-42fe-4249-b4b6-a7784203b5dd","Type":"ContainerStarted","Data":"f9074db4fd6a64a14cadfc53f7ab7bcdff7e111179979b5d4ba337ecbaaeeb25"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.180306 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.181460 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.681439968 +0000 UTC m=+160.407436998 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.213758 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" event={"ID":"f27898ec-5484-4679-a44a-a06bcd9e9e4c","Type":"ContainerStarted","Data":"ece7f66d28e58e05d2baeadf8e611d9ebd1f7fe27d6ad2652454aeb9a118485c"} Dec 05 06:48:52 crc kubenswrapper[4863]: W1205 06:48:52.217032 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcc3fd1c_ffd9_4a54_a912_e11d8a7aca52.slice/crio-ae4309ae6b4ae531d781495833333300bc2b04e43464cf667a41e77d1bd9be95 WatchSource:0}: Error finding container ae4309ae6b4ae531d781495833333300bc2b04e43464cf667a41e77d1bd9be95: Status 404 returned error can't find the container with id ae4309ae6b4ae531d781495833333300bc2b04e43464cf667a41e77d1bd9be95 Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.224061 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" event={"ID":"e496e045-f40a-4735-b2fa-4212a4d8412d","Type":"ContainerStarted","Data":"f6b4612a98e92ddf976e3818ce20a74ad99f02c8402acb848be6df4e754e3630"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.226178 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" event={"ID":"d383f927-13d0-4003-acff-216eb565846b","Type":"ContainerStarted","Data":"1c2276e89c5c3fba8aecb5a022c9f1700c931c10223e859e346beebf25ddff02"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.274620 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" event={"ID":"aa1c6030-fddf-4fe5-a6d2-55bdce963b06","Type":"ContainerStarted","Data":"486f295184feb5b2a8eda83737a7cce0646ebb21ddf525f9290f1d72f31e5e27"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.282075 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.283040 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.783003131 +0000 UTC m=+160.509000161 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.306540 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" podStartSLOduration=136.306520491 podStartE2EDuration="2m16.306520491s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.300022827 +0000 UTC m=+160.026019867" watchObservedRunningTime="2025-12-05 06:48:52.306520491 +0000 UTC m=+160.032517531" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.332492 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" podStartSLOduration=136.332461846 podStartE2EDuration="2m16.332461846s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.331016457 +0000 UTC m=+160.057013497" watchObservedRunningTime="2025-12-05 06:48:52.332461846 +0000 UTC m=+160.058458886" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.358142 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" event={"ID":"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4","Type":"ContainerStarted","Data":"ca45814397adb0dce36731067cdd3effc5b439905ae38a64690a02121afb7c2d"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.361792 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lx9lk" event={"ID":"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f","Type":"ContainerStarted","Data":"97697084781d10a0a3124a6c45fecdb47aa0997c542c337089373b08662fa0f4"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.368840 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" podStartSLOduration=136.368828061 podStartE2EDuration="2m16.368828061s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.368248066 +0000 UTC m=+160.094245106" watchObservedRunningTime="2025-12-05 06:48:52.368828061 +0000 UTC m=+160.094825101" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.383165 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.383562 4863 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.883539585 +0000 UTC m=+160.609536625 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.388448 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" event={"ID":"f9eb1c3d-a038-4b8a-a352-c906a1e53e94","Type":"ContainerStarted","Data":"0f2aa44a540a16df46069b1de13da9b22541cfd1c4dfbe3cb568176652f56db6"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.407260 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" event={"ID":"6eddcad1-79bc-4912-8bb4-7d6f5f018b64","Type":"ContainerStarted","Data":"6782a8d6fcdd735b39f34a3b56a11a6343d916a27046fa73eae89548e14ddd10"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.407295 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" event={"ID":"6eddcad1-79bc-4912-8bb4-7d6f5f018b64","Type":"ContainerStarted","Data":"7aaea1b7d0a3f8a69852263029f4f9e9315063db8700454cf7048a2ac8af2d97"} Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.410999 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-wscq8" podStartSLOduration=136.41098022 podStartE2EDuration="2m16.41098022s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.402580055 +0000 UTC m=+160.128577085" watchObservedRunningTime="2025-12-05 06:48:52.41098022 +0000 UTC m=+160.136977260" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.436089 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" podStartSLOduration=136.436075563 podStartE2EDuration="2m16.436075563s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.435364834 +0000 UTC m=+160.161361874" watchObservedRunningTime="2025-12-05 06:48:52.436075563 +0000 UTC m=+160.162072603" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.437980 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.443923 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:52 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:52 crc 
kubenswrapper[4863]: [+]process-running ok Dec 05 06:48:52 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.443979 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.465889 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" podStartSLOduration=136.465868931 podStartE2EDuration="2m16.465868931s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.464988308 +0000 UTC m=+160.190985348" watchObservedRunningTime="2025-12-05 06:48:52.465868931 +0000 UTC m=+160.191865971" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.485068 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.485424 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:52.985413285 +0000 UTC m=+160.711410325 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.544512 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-m58b7" podStartSLOduration=136.544495019 podStartE2EDuration="2m16.544495019s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.543166534 +0000 UTC m=+160.269163574" watchObservedRunningTime="2025-12-05 06:48:52.544495019 +0000 UTC m=+160.270492059" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.544756 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tdck6" podStartSLOduration=136.544750836 podStartE2EDuration="2m16.544750836s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.500268974 +0000 UTC m=+160.226266014" watchObservedRunningTime="2025-12-05 06:48:52.544750836 +0000 UTC m=+160.270747876" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.592592 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.594068 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-976gc" podStartSLOduration=136.594052857 podStartE2EDuration="2m16.594052857s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:52.591999652 +0000 UTC m=+160.317996692" watchObservedRunningTime="2025-12-05 06:48:52.594052857 +0000 UTC m=+160.320049897" Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.595180 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.093376349 +0000 UTC m=+160.819373389 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.693681 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.693978 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.193968185 +0000 UTC m=+160.919965225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.794814 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.794980 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.294955771 +0000 UTC m=+161.020952811 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.795496 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.795768 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.295756493 +0000 UTC m=+161.021753533 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.798787 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.896150 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:52 crc kubenswrapper[4863]: E1205 06:48:52.896736 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.396721889 +0000 UTC m=+161.122718929 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.990570 4863 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-b2ln6 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 06:48:52 crc kubenswrapper[4863]: I1205 06:48:52.990630 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" podUID="4f8aefb1-89f9-4ad4-83a0-bdaad8c955b5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.12:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.008982 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.009545 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.509531693 +0000 UTC m=+161.235528733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.114077 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.114270 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.614229419 +0000 UTC m=+161.340226459 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.137317 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.137861 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.637847482 +0000 UTC m=+161.363844522 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.239872 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.240553 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.740538615 +0000 UTC m=+161.466535655 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.341172 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.341432 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.841421048 +0000 UTC m=+161.567418088 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.438966 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:53 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:53 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:48:53 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.439189 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.446613 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.446796 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" event={"ID":"aa1c6030-fddf-4fe5-a6d2-55bdce963b06","Type":"ContainerStarted","Data":"692fdfb3e9b7c13e77025a6446ab8e79ae63e4bc7bde0dbe94e7b248585dfa81"} Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.446902 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:53.946887255 +0000 UTC m=+161.672884295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.457079 4863 generic.go:334] "Generic (PLEG): container finished" podID="7b557ce2-a0bf-4bf8-b37d-89d424b68d14" containerID="529e87b0aa458ccf5df2e71b6cc637309fdb1119430ceb49687ba56e1a16d762" exitCode=0 Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.457150 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" event={"ID":"7b557ce2-a0bf-4bf8-b37d-89d424b68d14","Type":"ContainerDied","Data":"529e87b0aa458ccf5df2e71b6cc637309fdb1119430ceb49687ba56e1a16d762"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.464265 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-26tcr" event={"ID":"0170aa22-9c08-4a04-910b-00c852ebca97","Type":"ContainerStarted","Data":"2bafd3dd43e3fdebc0625d27ffcc67fc333b9eaf6fd4037a453bf7061d2fbc77"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.477277 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lt45w" podStartSLOduration=137.477262619 podStartE2EDuration="2m17.477262619s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.463981963 +0000 UTC m=+161.189979003" watchObservedRunningTime="2025-12-05 06:48:53.477262619 +0000 UTC m=+161.203259659" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.501752 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" event={"ID":"4db68aa4-711a-4795-ad76-64d3dc63e61b","Type":"ContainerStarted","Data":"62299a32533407035ea54537961c229028f51d8136c78adb49b671795bf7e7af"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.532361 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" event={"ID":"f41784ac-d179-4d00-9cde-21e8683daf48","Type":"ContainerStarted","Data":"a302b3c86bbc9fa7cd35327b75673bcc583d839e656230032a7181ebf0b2c5dd"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.540417 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" event={"ID":"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4","Type":"ContainerStarted","Data":"2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.541315 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.544671 4863 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-c878b 
container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.544699 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" podUID="50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.547809 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.549630 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.049612228 +0000 UTC m=+161.775609268 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.554315 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" event={"ID":"a0db948e-d372-433d-8e87-26d82acaa96f","Type":"ContainerStarted","Data":"155af425b5988329da24db0f1aaa6e400d9985d14beaca03b3aab178e1ba5ff9"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.600906 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w9xlr" podStartSLOduration=137.600889822 podStartE2EDuration="2m17.600889822s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.568089324 +0000 UTC m=+161.294086354" watchObservedRunningTime="2025-12-05 06:48:53.600889822 +0000 UTC m=+161.326886862" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.601210 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-llph2" podStartSLOduration=137.601205501 podStartE2EDuration="2m17.601205501s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.601078217 +0000 UTC m=+161.327075257" watchObservedRunningTime="2025-12-05 06:48:53.601205501 +0000 UTC m=+161.327202541" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.603071 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" event={"ID":"6f6037a0-cf83-47af-9425-bad91534bffb","Type":"ContainerStarted","Data":"6192338ffce64d3586404463b0695b48898de47bb1f04087b317ef5126de66d5"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.603119 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" event={"ID":"6f6037a0-cf83-47af-9425-bad91534bffb","Type":"ContainerStarted","Data":"3b9338ef4fb5a7869c8a10bb5459a080b89ce9b1151c9f6c123435b8d8b2e57c"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.603526 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.608865 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" event={"ID":"e496e045-f40a-4735-b2fa-4212a4d8412d","Type":"ContainerStarted","Data":"9e844c3b7d91d0195cb318a9f222948300d0185ccea3ca7d5b5eee2a14125016"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.610380 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" event={"ID":"c7adf0f4-d376-4e6d-85a7-4d301b0513fd","Type":"ContainerStarted","Data":"175afa90a9f79c16c097cd1170a7ab2122b92050b5e0e02b93ab1e7200097f79"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.610929 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.622254 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" podStartSLOduration=137.622239394 podStartE2EDuration="2m17.622239394s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.620699563 +0000 UTC m=+161.346696603" watchObservedRunningTime="2025-12-05 06:48:53.622239394 +0000 UTC m=+161.348236424" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.627796 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" event={"ID":"78143d95-87cc-4aa5-acd1-7ad8674dfbd9","Type":"ContainerStarted","Data":"1ed97c69eb899e62dc9484701dd66c67107da0bf93cb950e8d3ee48ecdaec579"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.628302 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.631803 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" event={"ID":"f95688d7-dd9b-4c04-b696-42b8f54a2018","Type":"ContainerStarted","Data":"6f900a8b623d7a3a8fd755aab36b25487dca6ffe3437043f5ac3a3bbd2f2f52b"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.649598 4863 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-tcpcd container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 
10.217.0.31:8443: connect: connection refused" start-of-body= Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.649650 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" podUID="c7adf0f4-d376-4e6d-85a7-4d301b0513fd" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.650193 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" podStartSLOduration=137.650182883 podStartE2EDuration="2m17.650182883s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.650147192 +0000 UTC m=+161.376144232" watchObservedRunningTime="2025-12-05 06:48:53.650182883 +0000 UTC m=+161.376179923" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.650636 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.651632 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.151617312 +0000 UTC m=+161.877614352 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.673448 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" event={"ID":"aab23073-3d1c-4559-a876-f2e300029ad4","Type":"ContainerStarted","Data":"1b368fe66b1ae0c8cd37345e75672562a2612c3933e91d7d045a47575bc67a91"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.683990 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" event={"ID":"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52","Type":"ContainerStarted","Data":"ae4309ae6b4ae531d781495833333300bc2b04e43464cf667a41e77d1bd9be95"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.696990 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" event={"ID":"f9eb1c3d-a038-4b8a-a352-c906a1e53e94","Type":"ContainerStarted","Data":"2269944ce033fc8d829b396a65fe730e82ecff478ff29008975fe8adaa07f4ad"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.715012 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kbwgz" event={"ID":"871258f1-44d2-4a1b-934d-c20aaf2094c4","Type":"ContainerStarted","Data":"04027881faa50490bf098fd8371227ae69b1da403b3e96044a910bd7d6213464"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.715049 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kbwgz" event={"ID":"871258f1-44d2-4a1b-934d-c20aaf2094c4","Type":"ContainerStarted","Data":"46f799812478cd9aca5222823c976d7e7257f93b0620ac30a1039f80c4adcfee"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.738221 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" event={"ID":"f2f7af7f-6dfe-440d-b35c-b44a516c2dd8","Type":"ContainerStarted","Data":"b40fc14f49a8fd5cce3ce5dfb1c7f182ec89f221f4e8afbaf8c61ed20e1dd60b"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.739319 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fbcwk" podStartSLOduration=137.739305082 podStartE2EDuration="2m17.739305082s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.697018619 +0000 UTC m=+161.423015659" watchObservedRunningTime="2025-12-05 06:48:53.739305082 +0000 UTC m=+161.465302122" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.741198 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" podStartSLOduration=137.741192733 podStartE2EDuration="2m17.741192733s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 
06:48:53.738186472 +0000 UTC m=+161.464183512" watchObservedRunningTime="2025-12-05 06:48:53.741192733 +0000 UTC m=+161.467189773" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.741762 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" event={"ID":"f27898ec-5484-4679-a44a-a06bcd9e9e4c","Type":"ContainerStarted","Data":"74f40d4e2733c959ac8af3b8e9ec107906a8fcd3ebdc063f08a797f665532627"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.753361 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.772694 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.255187838 +0000 UTC m=+161.981184878 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.772846 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7tgbj" event={"ID":"648d25d7-c956-48d0-936b-053e5ae954be","Type":"ContainerStarted","Data":"8145500cfa31c927f9702439d131efe9472d649863c6a757fa7da29b729c6a9a"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.779232 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7tgbj" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.799608 4863 patch_prober.go:28] interesting pod/downloads-7954f5f757-7tgbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.799656 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7tgbj" podUID="648d25d7-c956-48d0-936b-053e5ae954be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.37:8080/\": dial tcp 10.217.0.37:8080: connect: connection refused" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.799995 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" event={"ID":"d383f927-13d0-4003-acff-216eb565846b","Type":"ContainerStarted","Data":"10383ac81e21f8b3571628ec08c079ffa98baa5750e300b872482ebaa54b2804"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.802219 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" podStartSLOduration=137.802201028 
podStartE2EDuration="2m17.802201028s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.800627926 +0000 UTC m=+161.526624966" watchObservedRunningTime="2025-12-05 06:48:53.802201028 +0000 UTC m=+161.528198068" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.826135 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" event={"ID":"98bc47ef-be35-4fd0-8413-ccc346ada61c","Type":"ContainerStarted","Data":"e51570e1cdb4083e4b77287ecd956e81ebb5de94f238369e59b1151558c3f9b8"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.826177 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" event={"ID":"98bc47ef-be35-4fd0-8413-ccc346ada61c","Type":"ContainerStarted","Data":"2614313f36067c101b90199c1bab39750e7cc0487f1ea8e12366a5ebcc9d83c6"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.833566 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-kbwgz" podStartSLOduration=6.833550768 podStartE2EDuration="6.833550768s" podCreationTimestamp="2025-12-05 06:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.831814512 +0000 UTC m=+161.557811552" watchObservedRunningTime="2025-12-05 06:48:53.833550768 +0000 UTC m=+161.559547808" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.856121 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" event={"ID":"37ceeb88-98e7-4849-8ea3-8a1486d29314","Type":"ContainerStarted","Data":"3c49308db70df46d736e7dc83ff71a59d070fa49261fdee408874a5c6dad9631"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.857010 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.857896 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.859051 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.359037391 +0000 UTC m=+162.085034431 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.859833 4863 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-m6jg2 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.859881 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" podUID="37ceeb88-98e7-4849-8ea3-8a1486d29314" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.876230 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" podStartSLOduration=137.876213832 podStartE2EDuration="2m17.876213832s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.874965758 +0000 UTC m=+161.600962798" watchObservedRunningTime="2025-12-05 06:48:53.876213832 +0000 UTC m=+161.602210872" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.879063 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" event={"ID":"e4e03408-7343-4cb3-8cf0-a3b3237a1d9f","Type":"ContainerStarted","Data":"7092f84453430c82c698ed62cd328348e32b74dd0e9f4b64a0ef7f608d17b129"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.894334 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dffgt" event={"ID":"58f537d3-f443-4553-a858-1b1455b98281","Type":"ContainerStarted","Data":"234446ff2def9ba2610a3c28ec36c85d97bbf1c2b6c20b5bcd924bc5e603fbeb"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.919985 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-f2b9m" podStartSLOduration=137.919965794 podStartE2EDuration="2m17.919965794s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.91831529 +0000 UTC m=+161.644312330" watchObservedRunningTime="2025-12-05 06:48:53.919965794 +0000 UTC m=+161.645962834" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.940258 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rcrtg" event={"ID":"0ea32fb0-42fe-4249-b4b6-a7784203b5dd","Type":"ContainerStarted","Data":"1ad80ab20b34edb40af09db63d9c04a30f656a4c7e7f4c6ee3767da6ba5f96e1"} Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.940702 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.943587 4863 patch_prober.go:28] interesting pod/console-operator-58897d9998-rcrtg container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.943632 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-rcrtg" podUID="0ea32fb0-42fe-4249-b4b6-a7784203b5dd" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.956780 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" podStartSLOduration=137.95676472 podStartE2EDuration="2m17.95676472s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:53.955585828 +0000 UTC m=+161.681582868" watchObservedRunningTime="2025-12-05 06:48:53.95676472 +0000 UTC m=+161.682761760" Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.959463 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:53 crc kubenswrapper[4863]: E1205 06:48:53.960768 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.460755667 +0000 UTC m=+162.186752707 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:53 crc kubenswrapper[4863]: I1205 06:48:53.991535 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lx9lk" event={"ID":"3533f6a9-2bcb-4570-8fbe-9cfc97ed4f0f","Type":"ContainerStarted","Data":"31428b682a0debf327997ba5ab4ad32f113d2bc0f4811be0b9728ace778f191c"} Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.013631 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b2ln6" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.029137 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" podStartSLOduration=138.0291199 podStartE2EDuration="2m18.0291199s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.003412141 +0000 UTC m=+161.729409181" watchObservedRunningTime="2025-12-05 06:48:54.0291199 +0000 UTC m=+161.755116940" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.030903 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-t7c7l" podStartSLOduration=138.030895797 podStartE2EDuration="2m18.030895797s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.028658128 +0000 UTC m=+161.754655168" watchObservedRunningTime="2025-12-05 06:48:54.030895797 +0000 UTC m=+161.756892837" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.056508 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7tgbj" podStartSLOduration=138.056494833 podStartE2EDuration="2m18.056494833s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.053840603 +0000 UTC m=+161.779837643" watchObservedRunningTime="2025-12-05 06:48:54.056494833 +0000 UTC m=+161.782491873" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.060246 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.062804 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.562784952 +0000 UTC m=+162.288781992 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.063043 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.066249 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.566235614 +0000 UTC m=+162.292232654 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.116875 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fdvqk" podStartSLOduration=138.116857962 podStartE2EDuration="2m18.116857962s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.078642467 +0000 UTC m=+161.804639507" watchObservedRunningTime="2025-12-05 06:48:54.116857962 +0000 UTC m=+161.842855002" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.117131 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-rcrtg" podStartSLOduration=138.117127199 podStartE2EDuration="2m18.117127199s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.114795276 +0000 UTC m=+161.840792316" watchObservedRunningTime="2025-12-05 06:48:54.117127199 +0000 UTC m=+161.843124239" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.165894 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.166158 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.666131252 +0000 UTC m=+162.392128292 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.166441 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.166807 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.666796459 +0000 UTC m=+162.392793499 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.231665 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" podStartSLOduration=138.231651118 podStartE2EDuration="2m18.231651118s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.198030987 +0000 UTC m=+161.924028027" watchObservedRunningTime="2025-12-05 06:48:54.231651118 +0000 UTC m=+161.957648148" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.267361 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-lx9lk" podStartSLOduration=7.267341934 podStartE2EDuration="7.267341934s" podCreationTimestamp="2025-12-05 06:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.23768261 +0000 UTC m=+161.963679650" watchObservedRunningTime="2025-12-05 06:48:54.267341934 +0000 UTC m=+161.993338975" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.267951 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 
06:48:54.268293 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.76827722 +0000 UTC m=+162.494274260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.269075 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8d6x4" podStartSLOduration=138.269069731 podStartE2EDuration="2m18.269069731s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:54.266032549 +0000 UTC m=+161.992029589" watchObservedRunningTime="2025-12-05 06:48:54.269069731 +0000 UTC m=+161.995066761" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.368838 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.369192 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.869175634 +0000 UTC m=+162.595172674 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.439306 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:54 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:54 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:48:54 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.439353 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.470037 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.470235 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.970206762 +0000 UTC m=+162.696203792 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.470285 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.470582 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:54.970569621 +0000 UTC m=+162.696566661 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.536137 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.536189 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.545125 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.572014 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.572235 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.072188815 +0000 UTC m=+162.798185855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.572301 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.572640 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.072632937 +0000 UTC m=+162.798629977 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.673596 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.674086 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.174071076 +0000 UTC m=+162.900068116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.775279 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.775702 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.275685699 +0000 UTC m=+163.001682739 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.876895 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.877071 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.377046575 +0000 UTC m=+163.103043615 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.877112 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.877421 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.377414346 +0000 UTC m=+163.103411386 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.978671 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.978837 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.478813093 +0000 UTC m=+163.204810133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.978975 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:54 crc kubenswrapper[4863]: E1205 06:48:54.979248 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.479235744 +0000 UTC m=+163.205232784 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:54 crc kubenswrapper[4863]: I1205 06:48:54.999735 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" event={"ID":"f95688d7-dd9b-4c04-b696-42b8f54a2018","Type":"ContainerStarted","Data":"44b4a094e606c03734ef3893f212a61dcc21c50a2b2bb52e292fcc3b6b53037b"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.002145 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" event={"ID":"37ceeb88-98e7-4849-8ea3-8a1486d29314","Type":"ContainerStarted","Data":"04eafb13d574808d54d680dc6e16bdce978d91c340b9fe99435278a9e6fc7dc9"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.005286 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" event={"ID":"4db68aa4-711a-4795-ad76-64d3dc63e61b","Type":"ContainerStarted","Data":"93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.005506 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.012819 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.016303 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" event={"ID":"f9eb1c3d-a038-4b8a-a352-c906a1e53e94","Type":"ContainerStarted","Data":"0d0ee504d214c0f27355c3ce2a2438e9f55f59dc9d4d15ad30feba9d67a928e5"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.022748 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wjlfz" podStartSLOduration=139.022729771 podStartE2EDuration="2m19.022729771s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:55.021512108 +0000 UTC m=+162.747509158" watchObservedRunningTime="2025-12-05 06:48:55.022729771 +0000 UTC m=+162.748726811" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.035262 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" event={"ID":"6f6037a0-cf83-47af-9425-bad91534bffb","Type":"ContainerStarted","Data":"12c433125c85602d9a192380f407bb500dde1862fe0cbac90708c39809392349"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.059418 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m6jg2" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.060082 4863 generic.go:334] "Generic (PLEG): container finished" podID="b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" 
containerID="8f3abe5f3e43ce3cf2934e6678cd54617b0512f044e18a2e7923bbe209fd0853" exitCode=0 Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.060128 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" event={"ID":"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e","Type":"ContainerDied","Data":"8f3abe5f3e43ce3cf2934e6678cd54617b0512f044e18a2e7923bbe209fd0853"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.063580 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" event={"ID":"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52","Type":"ContainerStarted","Data":"7fd8292f46497373cae2d600037997dd7ca049dbd2487d86f55be520b2b35dbc"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.063605 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" event={"ID":"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52","Type":"ContainerStarted","Data":"fd1c26576d45173f24afb625f9f06c79323acaf2ce7c54f41f94942ab80ebeb5"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.065253 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" event={"ID":"c7adf0f4-d376-4e6d-85a7-4d301b0513fd","Type":"ContainerStarted","Data":"f4c5cd17546b237eb15d7e65a41925c2f41630a40bd6ac50939ce859e36c67f1"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.081957 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.082057 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.58204089 +0000 UTC m=+163.308037930 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.083430 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.083495 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2gbd9" event={"ID":"d383f927-13d0-4003-acff-216eb565846b","Type":"ContainerStarted","Data":"34d7af82d7a684c201a470bdc13b06334ad2171459747e8784598a00b7fa4142"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.084716 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tcpcd" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.085832 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.585810882 +0000 UTC m=+163.311807922 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.100709 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" podStartSLOduration=139.10068984 podStartE2EDuration="2m19.10068984s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:55.070364128 +0000 UTC m=+162.796361178" watchObservedRunningTime="2025-12-05 06:48:55.10068984 +0000 UTC m=+162.826686880" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.102008 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-njvdd" podStartSLOduration=139.102002285 podStartE2EDuration="2m19.102002285s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:55.100639168 +0000 UTC m=+162.826636208" watchObservedRunningTime="2025-12-05 06:48:55.102002285 +0000 UTC m=+162.827999325" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.127590 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" event={"ID":"7b557ce2-a0bf-4bf8-b37d-89d424b68d14","Type":"ContainerStarted","Data":"8f5d557343987f6c45f4a1ec851ee57cb273d3b181f2320d81a6e9be1b786f01"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.127815 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" event={"ID":"7b557ce2-a0bf-4bf8-b37d-89d424b68d14","Type":"ContainerStarted","Data":"95a67ef6fc95dcc74d3391d7000faa2f8141f8ce75bdbe50dc12e614e23100a4"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.129432 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dffgt" event={"ID":"58f537d3-f443-4553-a858-1b1455b98281","Type":"ContainerStarted","Data":"6431ca741a4478f6511fb56893b95675c3abb0322552d6232615d22817f43dde"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.129455 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dffgt" event={"ID":"58f537d3-f443-4553-a858-1b1455b98281","Type":"ContainerStarted","Data":"4721da0ab1654e986b86507cb69fbf1ba00a6b4011b236014d66c7f14e56e709"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.129806 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-dffgt" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.132269 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-96nlv" event={"ID":"aab23073-3d1c-4559-a876-f2e300029ad4","Type":"ContainerStarted","Data":"c75ac39f7b0a181da39e77130aa4389e19c2befbd85cbc866d2d2913bd12c1ae"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.167249 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6g9rw" event={"ID":"6a1b2516-a4dc-40ef-aec3-af80d706114c","Type":"ContainerStarted","Data":"39ca2782301cb66705944b9a72a92c126b21bab96339efa16e0a7b3f8db07729"} Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.168870 4863 patch_prober.go:28] interesting pod/downloads-7954f5f757-7tgbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.168904 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7tgbj" podUID="648d25d7-c956-48d0-936b-053e5ae954be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.37:8080/\": dial tcp 10.217.0.37:8080: connect: connection refused" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.181293 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v5f6h" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.184215 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.185698 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.685679428 +0000 UTC m=+163.411676468 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.187696 4863 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.205739 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.207236 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-dffgt" podStartSLOduration=8.207211835 podStartE2EDuration="8.207211835s" podCreationTimestamp="2025-12-05 06:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:55.197619197 +0000 UTC m=+162.923616237" watchObservedRunningTime="2025-12-05 06:48:55.207211835 +0000 UTC m=+162.933208865" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.264669 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" podStartSLOduration=139.264650414 podStartE2EDuration="2m19.264650414s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:55.237442815 +0000 UTC m=+162.963439855" watchObservedRunningTime="2025-12-05 06:48:55.264650414 +0000 UTC m=+162.990647454" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.287388 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.291238 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.791224366 +0000 UTC m=+163.517221406 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.394658 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.394737 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.89471741 +0000 UTC m=+163.620714450 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.394934 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.395327 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.895309377 +0000 UTC m=+163.621306417 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.443651 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:55 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:55 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:48:55 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.443704 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.496017 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.496184 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.99615971 +0000 UTC m=+163.722156750 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.496547 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.496820 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:55.996807626 +0000 UTC m=+163.722804666 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.597650 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.597822 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.097797993 +0000 UTC m=+163.823795033 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.597915 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.598193 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.098181744 +0000 UTC m=+163.824178774 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.619499 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-rcrtg" Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.698460 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.698614 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.198588925 +0000 UTC m=+163.924585965 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.698719 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.699049 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.199041127 +0000 UTC m=+163.925038167 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.799867 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.800042 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.300016794 +0000 UTC m=+164.026013834 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.800090 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.800383 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.300369163 +0000 UTC m=+164.026366203 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.901369 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.901581 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.401553144 +0000 UTC m=+164.127550184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:55 crc kubenswrapper[4863]: I1205 06:48:55.901647 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:55 crc kubenswrapper[4863]: E1205 06:48:55.901923 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.401911604 +0000 UTC m=+164.127908644 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.003365 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:56 crc kubenswrapper[4863]: E1205 06:48:56.003536 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.503514717 +0000 UTC m=+164.229511757 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.003641 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:56 crc kubenswrapper[4863]: E1205 06:48:56.003947 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.503939708 +0000 UTC m=+164.229936748 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.031375 4863 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T06:48:55.187714982Z","Handler":null,"Name":""} Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.104830 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:56 crc kubenswrapper[4863]: E1205 06:48:56.104932 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.604916125 +0000 UTC m=+164.330913165 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.105251 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:56 crc kubenswrapper[4863]: E1205 06:48:56.105530 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 06:48:56.605523232 +0000 UTC m=+164.331520272 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rlw7x" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.119777 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-drxcz"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.120660 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.124138 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.140419 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-drxcz"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.158023 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.179700 4863 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.179810 4863 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.179780 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" event={"ID":"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52","Type":"ContainerStarted","Data":"73d1cb280308b41164b0c5994ecc1ba26d1ee53e9d07dfdb5e41b60254bf63df"} Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.182828 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" event={"ID":"bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52","Type":"ContainerStarted","Data":"ef88dfd0392cc46c0ce3ed29af299a2f55d2c4d3a754d45bdf71129bcd38deef"} Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.183310 4863 patch_prober.go:28] interesting pod/downloads-7954f5f757-7tgbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.183368 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7tgbj" podUID="648d25d7-c956-48d0-936b-053e5ae954be" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.37:8080/\": dial tcp 10.217.0.37:8080: connect: connection refused" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.214848 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.215044 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-catalog-content\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.215067 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-utilities\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.215086 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j9sb\" (UniqueName: \"kubernetes.io/projected/cb921038-e831-47ea-af78-e21e51079af7-kube-api-access-7j9sb\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.229522 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" podStartSLOduration=9.229503564 podStartE2EDuration="9.229503564s" podCreationTimestamp="2025-12-05 06:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:56.229311529 +0000 UTC m=+163.955308569" watchObservedRunningTime="2025-12-05 06:48:56.229503564 +0000 UTC m=+163.955500604" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.235248 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.298333 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cpr9z"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.300836 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.305673 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.314881 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cpr9z"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.316351 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-catalog-content\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.316406 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-utilities\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.316420 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j9sb\" (UniqueName: \"kubernetes.io/projected/cb921038-e831-47ea-af78-e21e51079af7-kube-api-access-7j9sb\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.316809 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.322267 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-catalog-content\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.323330 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-utilities\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.333008 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.333055 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.370251 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j9sb\" (UniqueName: \"kubernetes.io/projected/cb921038-e831-47ea-af78-e21e51079af7-kube-api-access-7j9sb\") pod \"certified-operators-drxcz\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.397168 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rlw7x\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.417818 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-catalog-content\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.417862 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgkwc\" (UniqueName: \"kubernetes.io/projected/9b9447f4-590b-4d1d-8105-bfad4f700daa-kube-api-access-sgkwc\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.417884 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-utilities\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.433516 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.437011 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:56 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:56 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:48:56 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.437045 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.486751 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.510605 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-92kxn"] Dec 05 06:48:56 crc kubenswrapper[4863]: E1205 06:48:56.510815 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" containerName="collect-profiles" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.510830 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" containerName="collect-profiles" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.510957 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" containerName="collect-profiles" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.512292 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.519818 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-catalog-content\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.519871 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgkwc\" (UniqueName: \"kubernetes.io/projected/9b9447f4-590b-4d1d-8105-bfad4f700daa-kube-api-access-sgkwc\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.519901 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-utilities\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.520258 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-catalog-content\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.520319 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-utilities\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.525382 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-92kxn"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.540319 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgkwc\" (UniqueName: \"kubernetes.io/projected/9b9447f4-590b-4d1d-8105-bfad4f700daa-kube-api-access-sgkwc\") pod \"community-operators-cpr9z\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.609430 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.614870 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.621321 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-config-volume\") pod \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.621359 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g66bw\" (UniqueName: \"kubernetes.io/projected/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-kube-api-access-g66bw\") pod \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.621430 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-secret-volume\") pod \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\" (UID: \"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e\") " Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.621602 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-catalog-content\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.621629 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fr8h\" (UniqueName: \"kubernetes.io/projected/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-kube-api-access-6fr8h\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.621666 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-utilities\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.622102 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-config-volume" (OuterVolumeSpecName: "config-volume") pod "b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" (UID: "b7325423-fd50-4f8b-8a2f-a2e06eaaef6e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.632021 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-kube-api-access-g66bw" (OuterVolumeSpecName: "kube-api-access-g66bw") pod "b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" (UID: "b7325423-fd50-4f8b-8a2f-a2e06eaaef6e"). InnerVolumeSpecName "kube-api-access-g66bw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.632180 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" (UID: "b7325423-fd50-4f8b-8a2f-a2e06eaaef6e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.701510 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9ngtb"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.703226 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.703539 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.716333 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ngtb"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.722488 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-drxcz"] Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.722926 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-catalog-content\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.722954 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fr8h\" (UniqueName: \"kubernetes.io/projected/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-kube-api-access-6fr8h\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.722992 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-utilities\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.723051 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.723063 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.723072 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g66bw\" (UniqueName: \"kubernetes.io/projected/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e-kube-api-access-g66bw\") on node \"crc\" DevicePath \"\"" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.723364 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-utilities\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.723595 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-catalog-content\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.744404 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fr8h\" (UniqueName: \"kubernetes.io/projected/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-kube-api-access-6fr8h\") pod \"certified-operators-92kxn\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.827002 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-utilities\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.827049 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-catalog-content\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.827099 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb72v\" (UniqueName: \"kubernetes.io/projected/cc5afbb7-616a-44bd-83ce-c464c642b7c4-kube-api-access-mb72v\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.832369 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.835006 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cpr9z"] Dec 05 06:48:56 crc kubenswrapper[4863]: W1205 06:48:56.844969 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b9447f4_590b_4d1d_8105_bfad4f700daa.slice/crio-9c07bd9ca720ca8e97d26f40eebda2ce095eb74cea6d8e914dfe9db79b1cf03f WatchSource:0}: Error finding container 9c07bd9ca720ca8e97d26f40eebda2ce095eb74cea6d8e914dfe9db79b1cf03f: Status 404 returned error can't find the container with id 9c07bd9ca720ca8e97d26f40eebda2ce095eb74cea6d8e914dfe9db79b1cf03f Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.900965 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rlw7x"] Dec 05 06:48:56 crc kubenswrapper[4863]: W1205 06:48:56.910518 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae6653d2_88b7_41b4_880f_aaa0a19c26dc.slice/crio-1cbbbc859b3ea682a0c90a77d94bdb3e4eba01fca4882c4636018c93703f74aa WatchSource:0}: Error finding container 1cbbbc859b3ea682a0c90a77d94bdb3e4eba01fca4882c4636018c93703f74aa: Status 404 returned error can't find the container with id 1cbbbc859b3ea682a0c90a77d94bdb3e4eba01fca4882c4636018c93703f74aa Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.928550 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb72v\" (UniqueName: \"kubernetes.io/projected/cc5afbb7-616a-44bd-83ce-c464c642b7c4-kube-api-access-mb72v\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.928620 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-utilities\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.928642 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-catalog-content\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.929233 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-catalog-content\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.929458 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-utilities\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:56 crc kubenswrapper[4863]: I1205 06:48:56.945578 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb72v\" (UniqueName: \"kubernetes.io/projected/cc5afbb7-616a-44bd-83ce-c464c642b7c4-kube-api-access-mb72v\") pod \"community-operators-9ngtb\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.000946 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-92kxn"] Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.022953 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.192388 4863 generic.go:334] "Generic (PLEG): container finished" podID="cb921038-e831-47ea-af78-e21e51079af7" containerID="a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3" exitCode=0 Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.192577 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drxcz" event={"ID":"cb921038-e831-47ea-af78-e21e51079af7","Type":"ContainerDied","Data":"a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.192881 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drxcz" event={"ID":"cb921038-e831-47ea-af78-e21e51079af7","Type":"ContainerStarted","Data":"f09a5d83ea9f3b5553a8e0b862ffd0a9d76ad912a8ace287ea8bbaaccb35b126"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.198335 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.202701 4863 generic.go:334] "Generic (PLEG): container finished" podID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerID="122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726" exitCode=0 Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.202883 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpr9z" event={"ID":"9b9447f4-590b-4d1d-8105-bfad4f700daa","Type":"ContainerDied","Data":"122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.202928 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpr9z" event={"ID":"9b9447f4-590b-4d1d-8105-bfad4f700daa","Type":"ContainerStarted","Data":"9c07bd9ca720ca8e97d26f40eebda2ce095eb74cea6d8e914dfe9db79b1cf03f"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.207497 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ngtb"] Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.213900 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" event={"ID":"ae6653d2-88b7-41b4-880f-aaa0a19c26dc","Type":"ContainerStarted","Data":"32f93a12451f1aa62f4826fb9e9d9acfea366557bbfa2610ff5244ccf4c93f4c"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.213937 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" event={"ID":"ae6653d2-88b7-41b4-880f-aaa0a19c26dc","Type":"ContainerStarted","Data":"1cbbbc859b3ea682a0c90a77d94bdb3e4eba01fca4882c4636018c93703f74aa"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 
06:48:57.214036 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.217124 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" event={"ID":"b7325423-fd50-4f8b-8a2f-a2e06eaaef6e","Type":"ContainerDied","Data":"53cb5c168e2a8adfcaed83cad612410533bffa1d06d1200b74c76c5c0359e707"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.217154 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53cb5c168e2a8adfcaed83cad612410533bffa1d06d1200b74c76c5c0359e707" Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.217208 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn" Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.220093 4863 generic.go:334] "Generic (PLEG): container finished" podID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerID="0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3" exitCode=0 Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.221433 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92kxn" event={"ID":"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5","Type":"ContainerDied","Data":"0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3"} Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.221466 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92kxn" event={"ID":"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5","Type":"ContainerStarted","Data":"193ff1b36902efcd2b982485f6ba11ddd8b2db490a5e13d8152dfdddce8698db"} Dec 05 06:48:57 crc kubenswrapper[4863]: W1205 06:48:57.236416 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc5afbb7_616a_44bd_83ce_c464c642b7c4.slice/crio-a254b09657785f241d068f6831f805ad4b2bb04249c483af31024adb94c8913d WatchSource:0}: Error finding container a254b09657785f241d068f6831f805ad4b2bb04249c483af31024adb94c8913d: Status 404 returned error can't find the container with id a254b09657785f241d068f6831f805ad4b2bb04249c483af31024adb94c8913d Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.253494 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" podStartSLOduration=141.253451288 podStartE2EDuration="2m21.253451288s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:48:57.247048976 +0000 UTC m=+164.973046016" watchObservedRunningTime="2025-12-05 06:48:57.253451288 +0000 UTC m=+164.979448318" Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.435171 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:57 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:57 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:48:57 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:57 crc kubenswrapper[4863]: I1205 06:48:57.435269 4863 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.232164 4863 generic.go:334] "Generic (PLEG): container finished" podID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerID="ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c" exitCode=0 Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.232255 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ngtb" event={"ID":"cc5afbb7-616a-44bd-83ce-c464c642b7c4","Type":"ContainerDied","Data":"ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c"} Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.232507 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ngtb" event={"ID":"cc5afbb7-616a-44bd-83ce-c464c642b7c4","Type":"ContainerStarted","Data":"a254b09657785f241d068f6831f805ad4b2bb04249c483af31024adb94c8913d"} Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.300029 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7jp8g"] Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.302650 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.305288 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.313966 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7jp8g"] Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.349754 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-catalog-content\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.349834 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-utilities\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.349879 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txwqv\" (UniqueName: \"kubernetes.io/projected/1b94ae89-c171-471a-bd34-12ed57a752b8-kube-api-access-txwqv\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.433922 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:58 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:58 crc kubenswrapper[4863]: 
[+]process-running ok Dec 05 06:48:58 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.433973 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.451234 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-utilities\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.451318 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txwqv\" (UniqueName: \"kubernetes.io/projected/1b94ae89-c171-471a-bd34-12ed57a752b8-kube-api-access-txwqv\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.451377 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-catalog-content\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.452050 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-catalog-content\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.452250 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-utilities\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.474079 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txwqv\" (UniqueName: \"kubernetes.io/projected/1b94ae89-c171-471a-bd34-12ed57a752b8-kube-api-access-txwqv\") pod \"redhat-marketplace-7jp8g\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.622420 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.700864 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-txjmw"] Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.707500 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-txjmw"] Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.707658 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.754433 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsqnw\" (UniqueName: \"kubernetes.io/projected/27275f67-6423-48c7-ad3c-0999649ebf4e-kube-api-access-qsqnw\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.754847 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-catalog-content\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.754873 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-utilities\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.856043 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-catalog-content\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.856140 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-utilities\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.856242 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsqnw\" (UniqueName: \"kubernetes.io/projected/27275f67-6423-48c7-ad3c-0999649ebf4e-kube-api-access-qsqnw\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.856976 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-catalog-content\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.857181 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-utilities\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.860927 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7jp8g"] Dec 05 06:48:58 crc kubenswrapper[4863]: I1205 06:48:58.873665 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qsqnw\" (UniqueName: \"kubernetes.io/projected/27275f67-6423-48c7-ad3c-0999649ebf4e-kube-api-access-qsqnw\") pod \"redhat-marketplace-txjmw\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.021542 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.022119 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.027129 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.027305 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.028973 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.032255 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.059761 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65de8ce6-87fc-44d8-9416-0298febcf9cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.059797 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/65de8ce6-87fc-44d8-9416-0298febcf9cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.160634 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.160921 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65de8ce6-87fc-44d8-9416-0298febcf9cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.160940 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/65de8ce6-87fc-44d8-9416-0298febcf9cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.161050 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/65de8ce6-87fc-44d8-9416-0298febcf9cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.166094 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3943e053-ef4c-4348-98a8-cc1473a197f2-metrics-certs\") pod \"network-metrics-daemon-96nzc\" (UID: \"3943e053-ef4c-4348-98a8-cc1473a197f2\") " pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.180642 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65de8ce6-87fc-44d8-9416-0298febcf9cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.226289 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-txjmw"] Dec 05 06:48:59 crc kubenswrapper[4863]: W1205 06:48:59.238956 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27275f67_6423_48c7_ad3c_0999649ebf4e.slice/crio-49ba6200e59d41c718486b1b9dee65e7ab760eefaaf623b2e81db56fde2ead08 WatchSource:0}: Error finding container 49ba6200e59d41c718486b1b9dee65e7ab760eefaaf623b2e81db56fde2ead08: Status 404 returned error can't find the container with id 49ba6200e59d41c718486b1b9dee65e7ab760eefaaf623b2e81db56fde2ead08 Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.243369 4863 generic.go:334] "Generic (PLEG): container finished" podID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerID="ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76" exitCode=0 Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.243418 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7jp8g" event={"ID":"1b94ae89-c171-471a-bd34-12ed57a752b8","Type":"ContainerDied","Data":"ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76"} Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.243445 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7jp8g" event={"ID":"1b94ae89-c171-471a-bd34-12ed57a752b8","Type":"ContainerStarted","Data":"88ac71343d29bbe6d9029676374b00e7f49d250a21b3c0aff5c763235c92cfd4"} Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.297108 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5vcrz"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.298300 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.303249 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.308185 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5vcrz"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.354455 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.363101 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgr7v\" (UniqueName: \"kubernetes.io/projected/632b1594-6205-4741-a50d-fdd157e0f47e-kube-api-access-lgr7v\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.363169 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-utilities\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.363201 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-catalog-content\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.436977 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:48:59 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:48:59 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:48:59 crc kubenswrapper[4863]: healthz check failed Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.437046 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.459922 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-96nzc" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.464162 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgr7v\" (UniqueName: \"kubernetes.io/projected/632b1594-6205-4741-a50d-fdd157e0f47e-kube-api-access-lgr7v\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.464641 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-utilities\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.465119 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-utilities\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.465170 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-catalog-content\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.465452 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-catalog-content\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.483538 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgr7v\" (UniqueName: \"kubernetes.io/projected/632b1594-6205-4741-a50d-fdd157e0f47e-kube-api-access-lgr7v\") pod \"redhat-operators-5vcrz\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.587063 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.619041 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.740038 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mhtdv"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.741459 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.745577 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mhtdv"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.816126 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-96nzc"] Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.821182 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcp9l\" (UniqueName: \"kubernetes.io/projected/53e9e031-086a-4945-ac5d-c9594f65072d-kube-api-access-zcp9l\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.821322 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-utilities\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.821363 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-catalog-content\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: W1205 06:48:59.838350 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3943e053_ef4c_4348_98a8_cc1473a197f2.slice/crio-17673bc05c5e79f25808188414b97e277b8417ab6a5cf822d822d2ff74118b1d WatchSource:0}: Error finding container 17673bc05c5e79f25808188414b97e277b8417ab6a5cf822d822d2ff74118b1d: Status 404 returned error can't find the container with id 17673bc05c5e79f25808188414b97e277b8417ab6a5cf822d822d2ff74118b1d Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.846585 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.846715 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.849228 4863 patch_prober.go:28] interesting pod/console-f9d7485db-wscq8 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.849280 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wscq8" podUID="9e75585a-25ca-4d16-b2ca-33c520e209e1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.857256 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.857308 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.863000 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.922487 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcp9l\" (UniqueName: \"kubernetes.io/projected/53e9e031-086a-4945-ac5d-c9594f65072d-kube-api-access-zcp9l\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.922578 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-utilities\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.922648 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-catalog-content\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.923823 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-catalog-content\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.923909 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-utilities\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.940089 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcp9l\" (UniqueName: \"kubernetes.io/projected/53e9e031-086a-4945-ac5d-c9594f65072d-kube-api-access-zcp9l\") pod \"redhat-operators-mhtdv\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:48:59 crc kubenswrapper[4863]: I1205 06:48:59.970641 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5vcrz"] Dec 05 06:48:59 crc kubenswrapper[4863]: W1205 06:48:59.977087 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod632b1594_6205_4741_a50d_fdd157e0f47e.slice/crio-24e87803e64399f488a995aa021b03708c876f3900abec69561d78cb7d1d64c2 WatchSource:0}: Error finding container 24e87803e64399f488a995aa021b03708c876f3900abec69561d78cb7d1d64c2: Status 404 returned error can't find the container with id 24e87803e64399f488a995aa021b03708c876f3900abec69561d78cb7d1d64c2 Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.053652 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.247129 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mhtdv"] Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.253453 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"65de8ce6-87fc-44d8-9416-0298febcf9cc","Type":"ContainerStarted","Data":"d969c08aed7d65c393487b482fc0f17c48af4fda9676aa95cef29cf446c040ee"} Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.253534 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"65de8ce6-87fc-44d8-9416-0298febcf9cc","Type":"ContainerStarted","Data":"219813f2f5af281b53b07bf5eafc15d639fe3b05a59cde5c04e46f1d03088681"} Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.258092 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-96nzc" event={"ID":"3943e053-ef4c-4348-98a8-cc1473a197f2","Type":"ContainerStarted","Data":"17673bc05c5e79f25808188414b97e277b8417ab6a5cf822d822d2ff74118b1d"} Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.262520 4863 generic.go:334] "Generic (PLEG): container finished" podID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerID="d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe" exitCode=0 Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.262574 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txjmw" event={"ID":"27275f67-6423-48c7-ad3c-0999649ebf4e","Type":"ContainerDied","Data":"d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe"} Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.262593 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txjmw" event={"ID":"27275f67-6423-48c7-ad3c-0999649ebf4e","Type":"ContainerStarted","Data":"49ba6200e59d41c718486b1b9dee65e7ab760eefaaf623b2e81db56fde2ead08"} Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.264574 4863 generic.go:334] "Generic (PLEG): container finished" podID="632b1594-6205-4741-a50d-fdd157e0f47e" containerID="0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93" exitCode=0 Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.264685 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.2646749640000001 podStartE2EDuration="1.264674964s" podCreationTimestamp="2025-12-05 06:48:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:49:00.263841983 +0000 UTC m=+167.989839023" watchObservedRunningTime="2025-12-05 06:49:00.264674964 +0000 UTC m=+167.990672004" Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.264885 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5vcrz" event={"ID":"632b1594-6205-4741-a50d-fdd157e0f47e","Type":"ContainerDied","Data":"0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93"} Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.264923 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5vcrz" 
event={"ID":"632b1594-6205-4741-a50d-fdd157e0f47e","Type":"ContainerStarted","Data":"24e87803e64399f488a995aa021b03708c876f3900abec69561d78cb7d1d64c2"} Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.270300 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-xw4vb" Dec 05 06:49:00 crc kubenswrapper[4863]: W1205 06:49:00.274128 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53e9e031_086a_4945_ac5d_c9594f65072d.slice/crio-5bed903c14b8c602162d1f49569951e1d5db64ea2922b5da4d3d6591e7cd9680 WatchSource:0}: Error finding container 5bed903c14b8c602162d1f49569951e1d5db64ea2922b5da4d3d6591e7cd9680: Status 404 returned error can't find the container with id 5bed903c14b8c602162d1f49569951e1d5db64ea2922b5da4d3d6591e7cd9680 Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.432765 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.436335 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:00 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:00 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:00 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.436391 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:00 crc kubenswrapper[4863]: I1205 06:49:00.517880 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7tgbj" Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.288227 4863 generic.go:334] "Generic (PLEG): container finished" podID="65de8ce6-87fc-44d8-9416-0298febcf9cc" containerID="d969c08aed7d65c393487b482fc0f17c48af4fda9676aa95cef29cf446c040ee" exitCode=0 Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.288320 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"65de8ce6-87fc-44d8-9416-0298febcf9cc","Type":"ContainerDied","Data":"d969c08aed7d65c393487b482fc0f17c48af4fda9676aa95cef29cf446c040ee"} Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.291378 4863 generic.go:334] "Generic (PLEG): container finished" podID="53e9e031-086a-4945-ac5d-c9594f65072d" containerID="bb64b3de9f5861d8ff84407ac4ccbd05c81f5d04a873374498811d8b3783dd55" exitCode=0 Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.291440 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhtdv" event={"ID":"53e9e031-086a-4945-ac5d-c9594f65072d","Type":"ContainerDied","Data":"bb64b3de9f5861d8ff84407ac4ccbd05c81f5d04a873374498811d8b3783dd55"} Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.291481 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhtdv" 
event={"ID":"53e9e031-086a-4945-ac5d-c9594f65072d","Type":"ContainerStarted","Data":"5bed903c14b8c602162d1f49569951e1d5db64ea2922b5da4d3d6591e7cd9680"} Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.294584 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-96nzc" event={"ID":"3943e053-ef4c-4348-98a8-cc1473a197f2","Type":"ContainerStarted","Data":"cd53c463bad9ca888e2da586192493c078ebdeeb1d2a185bda31dc0eadabcf21"} Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.294611 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-96nzc" event={"ID":"3943e053-ef4c-4348-98a8-cc1473a197f2","Type":"ContainerStarted","Data":"673d6960f0886ff6f43e74d94f8b2e25fc7a7763d71a338fdcdfc838c69a829a"} Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.344707 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-96nzc" podStartSLOduration=145.344691912 podStartE2EDuration="2m25.344691912s" podCreationTimestamp="2025-12-05 06:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:49:01.341226868 +0000 UTC m=+169.067223928" watchObservedRunningTime="2025-12-05 06:49:01.344691912 +0000 UTC m=+169.070688952" Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.435110 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:01 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:01 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:01 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:01 crc kubenswrapper[4863]: I1205 06:49:01.435192 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.434045 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:02 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:02 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:02 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.434340 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.769861 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.770773 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.773286 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.774326 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.775388 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.865011 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8911b050-c2af-4d61-9deb-e9065d292f99-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.865154 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8911b050-c2af-4d61-9deb-e9065d292f99-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.966425 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8911b050-c2af-4d61-9deb-e9065d292f99-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.966488 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8911b050-c2af-4d61-9deb-e9065d292f99-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:02 crc kubenswrapper[4863]: I1205 06:49:02.966572 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8911b050-c2af-4d61-9deb-e9065d292f99-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:03 crc kubenswrapper[4863]: I1205 06:49:03.002014 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8911b050-c2af-4d61-9deb-e9065d292f99-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:03 crc kubenswrapper[4863]: I1205 06:49:03.093458 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:03 crc kubenswrapper[4863]: I1205 06:49:03.433696 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:03 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:03 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:03 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:03 crc kubenswrapper[4863]: I1205 06:49:03.433746 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:04 crc kubenswrapper[4863]: I1205 06:49:04.434855 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:04 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:04 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:04 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:04 crc kubenswrapper[4863]: I1205 06:49:04.434911 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:05 crc kubenswrapper[4863]: I1205 06:49:05.434381 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:05 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:05 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:05 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:05 crc kubenswrapper[4863]: I1205 06:49:05.434711 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:05 crc kubenswrapper[4863]: I1205 06:49:05.551145 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-dffgt" Dec 05 06:49:06 crc kubenswrapper[4863]: I1205 06:49:06.434061 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:06 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:06 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:06 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:06 crc kubenswrapper[4863]: I1205 06:49:06.434123 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed 
with statuscode: 500" Dec 05 06:49:07 crc kubenswrapper[4863]: I1205 06:49:07.435730 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:07 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:07 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:07 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:07 crc kubenswrapper[4863]: I1205 06:49:07.436082 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:08 crc kubenswrapper[4863]: I1205 06:49:08.435270 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:08 crc kubenswrapper[4863]: [-]has-synced failed: reason withheld Dec 05 06:49:08 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:08 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:08 crc kubenswrapper[4863]: I1205 06:49:08.435342 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:08 crc kubenswrapper[4863]: I1205 06:49:08.463759 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:49:08 crc kubenswrapper[4863]: I1205 06:49:08.463807 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:49:09 crc kubenswrapper[4863]: I1205 06:49:09.434386 4863 patch_prober.go:28] interesting pod/router-default-5444994796-m58b7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 06:49:09 crc kubenswrapper[4863]: [+]has-synced ok Dec 05 06:49:09 crc kubenswrapper[4863]: [+]process-running ok Dec 05 06:49:09 crc kubenswrapper[4863]: healthz check failed Dec 05 06:49:09 crc kubenswrapper[4863]: I1205 06:49:09.434646 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m58b7" podUID="e9fc12ef-9f42-40d9-b3b0-7d2e729bdfb1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 06:49:09 crc kubenswrapper[4863]: I1205 06:49:09.847302 4863 patch_prober.go:28] interesting pod/console-f9d7485db-wscq8 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" 
start-of-body= Dec 05 06:49:09 crc kubenswrapper[4863]: I1205 06:49:09.847369 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wscq8" podUID="9e75585a-25ca-4d16-b2ca-33c520e209e1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" Dec 05 06:49:10 crc kubenswrapper[4863]: I1205 06:49:10.433685 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:49:10 crc kubenswrapper[4863]: I1205 06:49:10.436621 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-m58b7" Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.214737 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.373847 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/65de8ce6-87fc-44d8-9416-0298febcf9cc-kubelet-dir\") pod \"65de8ce6-87fc-44d8-9416-0298febcf9cc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.374159 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65de8ce6-87fc-44d8-9416-0298febcf9cc-kube-api-access\") pod \"65de8ce6-87fc-44d8-9416-0298febcf9cc\" (UID: \"65de8ce6-87fc-44d8-9416-0298febcf9cc\") " Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.373994 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65de8ce6-87fc-44d8-9416-0298febcf9cc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "65de8ce6-87fc-44d8-9416-0298febcf9cc" (UID: "65de8ce6-87fc-44d8-9416-0298febcf9cc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.374608 4863 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/65de8ce6-87fc-44d8-9416-0298febcf9cc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.378140 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.378161 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"65de8ce6-87fc-44d8-9416-0298febcf9cc","Type":"ContainerDied","Data":"219813f2f5af281b53b07bf5eafc15d639fe3b05a59cde5c04e46f1d03088681"} Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.378334 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="219813f2f5af281b53b07bf5eafc15d639fe3b05a59cde5c04e46f1d03088681" Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.381714 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65de8ce6-87fc-44d8-9416-0298febcf9cc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "65de8ce6-87fc-44d8-9416-0298febcf9cc" (UID: "65de8ce6-87fc-44d8-9416-0298febcf9cc"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:49:14 crc kubenswrapper[4863]: I1205 06:49:14.476717 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65de8ce6-87fc-44d8-9416-0298febcf9cc-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:16 crc kubenswrapper[4863]: I1205 06:49:16.711235 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:49:19 crc kubenswrapper[4863]: I1205 06:49:19.854260 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:49:19 crc kubenswrapper[4863]: I1205 06:49:19.861448 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 06:49:20 crc kubenswrapper[4863]: I1205 06:49:20.253689 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 06:49:24 crc kubenswrapper[4863]: E1205 06:49:24.456225 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 06:49:24 crc kubenswrapper[4863]: E1205 06:49:24.456799 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7j9sb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-drxcz_openshift-marketplace(cb921038-e831-47ea-af78-e21e51079af7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 06:49:24 crc kubenswrapper[4863]: E1205 06:49:24.458038 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-drxcz" podUID="cb921038-e831-47ea-af78-e21e51079af7" Dec 05 06:49:26 crc kubenswrapper[4863]: E1205 06:49:26.164034 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-drxcz" podUID="cb921038-e831-47ea-af78-e21e51079af7" Dec 05 06:49:26 crc kubenswrapper[4863]: E1205 06:49:26.239650 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 06:49:26 crc kubenswrapper[4863]: E1205 06:49:26.239861 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6fr8h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-92kxn_openshift-marketplace(d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 06:49:26 crc kubenswrapper[4863]: E1205 06:49:26.241935 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-92kxn" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" Dec 05 06:49:27 crc kubenswrapper[4863]: E1205 06:49:27.724662 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-92kxn" 
podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.872048 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage930794168/1\": happened during read: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.872675 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zcp9l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mhtdv_openshift-marketplace(53e9e031-086a-4945-ac5d-c9594f65072d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage930794168/1\": happened during read: context canceled" logger="UnhandledError" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.873939 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage930794168/1\\\": happened during read: context canceled\"" pod="openshift-marketplace/redhat-operators-mhtdv" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.927464 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.927692 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-txwqv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7jp8g_openshift-marketplace(1b94ae89-c171-471a-bd34-12ed57a752b8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.928821 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7jp8g" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.972137 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.972305 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mb72v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-9ngtb_openshift-marketplace(cc5afbb7-616a-44bd-83ce-c464c642b7c4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 06:49:28 crc kubenswrapper[4863]: E1205 06:49:28.973506 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-9ngtb" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" Dec 05 06:49:30 crc kubenswrapper[4863]: I1205 06:49:30.702282 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v22km" Dec 05 06:49:31 crc kubenswrapper[4863]: E1205 06:49:31.938307 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mhtdv" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" Dec 05 06:49:31 crc kubenswrapper[4863]: E1205 06:49:31.938699 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7jp8g" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" Dec 05 06:49:32 crc kubenswrapper[4863]: E1205 06:49:32.029700 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-9ngtb" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" Dec 05 06:49:32 crc kubenswrapper[4863]: I1205 06:49:32.434488 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 06:49:32 
crc kubenswrapper[4863]: W1205 06:49:32.458667 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod8911b050_c2af_4d61_9deb_e9065d292f99.slice/crio-781313e462a150b8870460f7495c0a85f615b557de8e6a466882ef5ada3f99de WatchSource:0}: Error finding container 781313e462a150b8870460f7495c0a85f615b557de8e6a466882ef5ada3f99de: Status 404 returned error can't find the container with id 781313e462a150b8870460f7495c0a85f615b557de8e6a466882ef5ada3f99de Dec 05 06:49:32 crc kubenswrapper[4863]: I1205 06:49:32.485636 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpr9z" event={"ID":"9b9447f4-590b-4d1d-8105-bfad4f700daa","Type":"ContainerStarted","Data":"702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944"} Dec 05 06:49:32 crc kubenswrapper[4863]: I1205 06:49:32.487362 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5vcrz" event={"ID":"632b1594-6205-4741-a50d-fdd157e0f47e","Type":"ContainerStarted","Data":"67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b"} Dec 05 06:49:32 crc kubenswrapper[4863]: I1205 06:49:32.488594 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8911b050-c2af-4d61-9deb-e9065d292f99","Type":"ContainerStarted","Data":"781313e462a150b8870460f7495c0a85f615b557de8e6a466882ef5ada3f99de"} Dec 05 06:49:32 crc kubenswrapper[4863]: I1205 06:49:32.491026 4863 generic.go:334] "Generic (PLEG): container finished" podID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerID="c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973" exitCode=0 Dec 05 06:49:32 crc kubenswrapper[4863]: I1205 06:49:32.491080 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txjmw" event={"ID":"27275f67-6423-48c7-ad3c-0999649ebf4e","Type":"ContainerDied","Data":"c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973"} Dec 05 06:49:33 crc kubenswrapper[4863]: I1205 06:49:33.501214 4863 generic.go:334] "Generic (PLEG): container finished" podID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerID="702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944" exitCode=0 Dec 05 06:49:33 crc kubenswrapper[4863]: I1205 06:49:33.501658 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpr9z" event={"ID":"9b9447f4-590b-4d1d-8105-bfad4f700daa","Type":"ContainerDied","Data":"702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944"} Dec 05 06:49:33 crc kubenswrapper[4863]: I1205 06:49:33.505193 4863 generic.go:334] "Generic (PLEG): container finished" podID="632b1594-6205-4741-a50d-fdd157e0f47e" containerID="67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b" exitCode=0 Dec 05 06:49:33 crc kubenswrapper[4863]: I1205 06:49:33.505664 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5vcrz" event={"ID":"632b1594-6205-4741-a50d-fdd157e0f47e","Type":"ContainerDied","Data":"67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b"} Dec 05 06:49:33 crc kubenswrapper[4863]: I1205 06:49:33.509216 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8911b050-c2af-4d61-9deb-e9065d292f99","Type":"ContainerStarted","Data":"c29c95de8a245d6625ed8f057d017f7025ed4d8336c5b9843b33e869f51b6bc7"} Dec 05 06:49:33 crc kubenswrapper[4863]: I1205 
06:49:33.513409 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txjmw" event={"ID":"27275f67-6423-48c7-ad3c-0999649ebf4e","Type":"ContainerStarted","Data":"ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8"} Dec 05 06:49:33 crc kubenswrapper[4863]: I1205 06:49:33.596996 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-txjmw" podStartSLOduration=3.007506681 podStartE2EDuration="35.596963805s" podCreationTimestamp="2025-12-05 06:48:58 +0000 UTC" firstStartedPulling="2025-12-05 06:49:00.272286459 +0000 UTC m=+167.998283499" lastFinishedPulling="2025-12-05 06:49:32.861743553 +0000 UTC m=+200.587740623" observedRunningTime="2025-12-05 06:49:33.583999134 +0000 UTC m=+201.309996184" watchObservedRunningTime="2025-12-05 06:49:33.596963805 +0000 UTC m=+201.322960865" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.070567 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=32.070548998 podStartE2EDuration="32.070548998s" podCreationTimestamp="2025-12-05 06:49:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:49:34.069899711 +0000 UTC m=+201.795896761" watchObservedRunningTime="2025-12-05 06:49:34.070548998 +0000 UTC m=+201.796546048" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.520943 4863 generic.go:334] "Generic (PLEG): container finished" podID="8911b050-c2af-4d61-9deb-e9065d292f99" containerID="c29c95de8a245d6625ed8f057d017f7025ed4d8336c5b9843b33e869f51b6bc7" exitCode=0 Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.521020 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8911b050-c2af-4d61-9deb-e9065d292f99","Type":"ContainerDied","Data":"c29c95de8a245d6625ed8f057d017f7025ed4d8336c5b9843b33e869f51b6bc7"} Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.564702 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 06:49:34 crc kubenswrapper[4863]: E1205 06:49:34.564918 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65de8ce6-87fc-44d8-9416-0298febcf9cc" containerName="pruner" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.564935 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="65de8ce6-87fc-44d8-9416-0298febcf9cc" containerName="pruner" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.565073 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="65de8ce6-87fc-44d8-9416-0298febcf9cc" containerName="pruner" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.565530 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.616610 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.752863 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6268305-1a0b-4bef-81f3-d0a26674cd87-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.752941 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6268305-1a0b-4bef-81f3-d0a26674cd87-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.854134 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6268305-1a0b-4bef-81f3-d0a26674cd87-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.854214 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6268305-1a0b-4bef-81f3-d0a26674cd87-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.854287 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6268305-1a0b-4bef-81f3-d0a26674cd87-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.877279 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6268305-1a0b-4bef-81f3-d0a26674cd87-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:34 crc kubenswrapper[4863]: I1205 06:49:34.886233 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.287484 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 06:49:35 crc kubenswrapper[4863]: W1205 06:49:35.298004 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda6268305_1a0b_4bef_81f3_d0a26674cd87.slice/crio-6350208ff9b32312b4edc38ab82abb65020dae912609094d5e82050b08a2876a WatchSource:0}: Error finding container 6350208ff9b32312b4edc38ab82abb65020dae912609094d5e82050b08a2876a: Status 404 returned error can't find the container with id 6350208ff9b32312b4edc38ab82abb65020dae912609094d5e82050b08a2876a Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.529386 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a6268305-1a0b-4bef-81f3-d0a26674cd87","Type":"ContainerStarted","Data":"6350208ff9b32312b4edc38ab82abb65020dae912609094d5e82050b08a2876a"} Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.531664 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5vcrz" event={"ID":"632b1594-6205-4741-a50d-fdd157e0f47e","Type":"ContainerStarted","Data":"4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340"} Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.534370 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpr9z" event={"ID":"9b9447f4-590b-4d1d-8105-bfad4f700daa","Type":"ContainerStarted","Data":"645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136"} Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.551990 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5vcrz" podStartSLOduration=2.371187236 podStartE2EDuration="36.551973222s" podCreationTimestamp="2025-12-05 06:48:59 +0000 UTC" firstStartedPulling="2025-12-05 06:49:00.275003992 +0000 UTC m=+168.001001032" lastFinishedPulling="2025-12-05 06:49:34.455789978 +0000 UTC m=+202.181787018" observedRunningTime="2025-12-05 06:49:35.549757725 +0000 UTC m=+203.275754765" watchObservedRunningTime="2025-12-05 06:49:35.551973222 +0000 UTC m=+203.277970262" Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.572113 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cpr9z" podStartSLOduration=2.382161946 podStartE2EDuration="39.572097575s" podCreationTimestamp="2025-12-05 06:48:56 +0000 UTC" firstStartedPulling="2025-12-05 06:48:57.204110595 +0000 UTC m=+164.930107635" lastFinishedPulling="2025-12-05 06:49:34.394046224 +0000 UTC m=+202.120043264" observedRunningTime="2025-12-05 06:49:35.57190529 +0000 UTC m=+203.297902350" watchObservedRunningTime="2025-12-05 06:49:35.572097575 +0000 UTC m=+203.298094615" Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.830146 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.968364 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8911b050-c2af-4d61-9deb-e9065d292f99-kube-api-access\") pod \"8911b050-c2af-4d61-9deb-e9065d292f99\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.968678 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8911b050-c2af-4d61-9deb-e9065d292f99-kubelet-dir\") pod \"8911b050-c2af-4d61-9deb-e9065d292f99\" (UID: \"8911b050-c2af-4d61-9deb-e9065d292f99\") " Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.968812 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8911b050-c2af-4d61-9deb-e9065d292f99-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8911b050-c2af-4d61-9deb-e9065d292f99" (UID: "8911b050-c2af-4d61-9deb-e9065d292f99"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.969037 4863 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8911b050-c2af-4d61-9deb-e9065d292f99-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:35 crc kubenswrapper[4863]: I1205 06:49:35.974762 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8911b050-c2af-4d61-9deb-e9065d292f99-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8911b050-c2af-4d61-9deb-e9065d292f99" (UID: "8911b050-c2af-4d61-9deb-e9065d292f99"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.070526 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8911b050-c2af-4d61-9deb-e9065d292f99-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.541172 4863 generic.go:334] "Generic (PLEG): container finished" podID="a6268305-1a0b-4bef-81f3-d0a26674cd87" containerID="29d16b76cda528b7964303ae5a13937faa3f247535ecd77c003cb0169372cf63" exitCode=0 Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.541250 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a6268305-1a0b-4bef-81f3-d0a26674cd87","Type":"ContainerDied","Data":"29d16b76cda528b7964303ae5a13937faa3f247535ecd77c003cb0169372cf63"} Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.542669 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8911b050-c2af-4d61-9deb-e9065d292f99","Type":"ContainerDied","Data":"781313e462a150b8870460f7495c0a85f615b557de8e6a466882ef5ada3f99de"} Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.542716 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="781313e462a150b8870460f7495c0a85f615b557de8e6a466882ef5ada3f99de" Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.542752 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.615376 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.615423 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:49:36 crc kubenswrapper[4863]: I1205 06:49:36.700601 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:49:37 crc kubenswrapper[4863]: I1205 06:49:37.904974 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.093782 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6268305-1a0b-4bef-81f3-d0a26674cd87-kubelet-dir\") pod \"a6268305-1a0b-4bef-81f3-d0a26674cd87\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.093913 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6268305-1a0b-4bef-81f3-d0a26674cd87-kube-api-access\") pod \"a6268305-1a0b-4bef-81f3-d0a26674cd87\" (UID: \"a6268305-1a0b-4bef-81f3-d0a26674cd87\") " Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.093909 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6268305-1a0b-4bef-81f3-d0a26674cd87-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a6268305-1a0b-4bef-81f3-d0a26674cd87" (UID: "a6268305-1a0b-4bef-81f3-d0a26674cd87"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.094972 4863 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6268305-1a0b-4bef-81f3-d0a26674cd87-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.108124 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6268305-1a0b-4bef-81f3-d0a26674cd87-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a6268305-1a0b-4bef-81f3-d0a26674cd87" (UID: "a6268305-1a0b-4bef-81f3-d0a26674cd87"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.195785 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6268305-1a0b-4bef-81f3-d0a26674cd87-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.464058 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.464124 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.464168 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.464756 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.464856 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c" gracePeriod=600 Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.555537 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.555576 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a6268305-1a0b-4bef-81f3-d0a26674cd87","Type":"ContainerDied","Data":"6350208ff9b32312b4edc38ab82abb65020dae912609094d5e82050b08a2876a"} Dec 05 06:49:38 crc kubenswrapper[4863]: I1205 06:49:38.555631 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6350208ff9b32312b4edc38ab82abb65020dae912609094d5e82050b08a2876a" Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.030261 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.030310 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.114004 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.561628 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c" exitCode=0 Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.562415 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c"} Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.562444 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"9e55052d654a4e9837e8299382b6eba9dfb2f4bbe65e2faea6b5912ba55582f2"} Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.602645 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.622723 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:49:39 crc kubenswrapper[4863]: I1205 06:49:39.622752 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:49:40 crc kubenswrapper[4863]: I1205 06:49:40.576651 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drxcz" event={"ID":"cb921038-e831-47ea-af78-e21e51079af7","Type":"ContainerStarted","Data":"eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda"} Dec 05 06:49:40 crc kubenswrapper[4863]: I1205 06:49:40.659418 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5vcrz" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="registry-server" probeResult="failure" output=< Dec 05 06:49:40 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 06:49:40 crc kubenswrapper[4863]: > Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.487213 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-txjmw"] Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.583317 4863 generic.go:334] "Generic (PLEG): container finished" podID="cb921038-e831-47ea-af78-e21e51079af7" containerID="eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda" exitCode=0 Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.583383 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drxcz" event={"ID":"cb921038-e831-47ea-af78-e21e51079af7","Type":"ContainerDied","Data":"eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda"} Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.583591 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-txjmw" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="registry-server" containerID="cri-o://ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8" gracePeriod=2 Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.762038 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 06:49:41 crc kubenswrapper[4863]: E1205 06:49:41.762697 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6268305-1a0b-4bef-81f3-d0a26674cd87" containerName="pruner" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.762831 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6268305-1a0b-4bef-81f3-d0a26674cd87" containerName="pruner" Dec 05 06:49:41 crc kubenswrapper[4863]: E1205 06:49:41.762952 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8911b050-c2af-4d61-9deb-e9065d292f99" containerName="pruner" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.763085 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8911b050-c2af-4d61-9deb-e9065d292f99" containerName="pruner" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.763383 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8911b050-c2af-4d61-9deb-e9065d292f99" containerName="pruner" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.763535 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6268305-1a0b-4bef-81f3-d0a26674cd87" containerName="pruner" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.764184 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.765783 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.766232 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.772308 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.944415 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.944844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0f9656f-c10c-416a-998e-0a39b3641e55-kube-api-access\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:41 crc kubenswrapper[4863]: I1205 06:49:41.944899 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-var-lock\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.046641 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.046692 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0f9656f-c10c-416a-998e-0a39b3641e55-kube-api-access\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.046741 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-var-lock\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.046860 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-var-lock\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.046903 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.065628 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0f9656f-c10c-416a-998e-0a39b3641e55-kube-api-access\") pod \"installer-9-crc\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.106165 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.124372 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.248641 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsqnw\" (UniqueName: \"kubernetes.io/projected/27275f67-6423-48c7-ad3c-0999649ebf4e-kube-api-access-qsqnw\") pod \"27275f67-6423-48c7-ad3c-0999649ebf4e\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.248898 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-catalog-content\") pod \"27275f67-6423-48c7-ad3c-0999649ebf4e\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.248954 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-utilities\") pod \"27275f67-6423-48c7-ad3c-0999649ebf4e\" (UID: \"27275f67-6423-48c7-ad3c-0999649ebf4e\") " Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.250343 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-utilities" (OuterVolumeSpecName: "utilities") pod "27275f67-6423-48c7-ad3c-0999649ebf4e" (UID: "27275f67-6423-48c7-ad3c-0999649ebf4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.251789 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27275f67-6423-48c7-ad3c-0999649ebf4e-kube-api-access-qsqnw" (OuterVolumeSpecName: "kube-api-access-qsqnw") pod "27275f67-6423-48c7-ad3c-0999649ebf4e" (UID: "27275f67-6423-48c7-ad3c-0999649ebf4e"). InnerVolumeSpecName "kube-api-access-qsqnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.286116 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27275f67-6423-48c7-ad3c-0999649ebf4e" (UID: "27275f67-6423-48c7-ad3c-0999649ebf4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.349789 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsqnw\" (UniqueName: \"kubernetes.io/projected/27275f67-6423-48c7-ad3c-0999649ebf4e-kube-api-access-qsqnw\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.349816 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.349826 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27275f67-6423-48c7-ad3c-0999649ebf4e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:42 crc kubenswrapper[4863]: W1205 06:49:42.535434 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf0f9656f_c10c_416a_998e_0a39b3641e55.slice/crio-b78a9a5ccde661c9a5fc7ca93c4666eaaa5fd623dcc6c1803a25b1c0fce30dcf WatchSource:0}: Error finding container b78a9a5ccde661c9a5fc7ca93c4666eaaa5fd623dcc6c1803a25b1c0fce30dcf: Status 404 returned error can't find the container with id b78a9a5ccde661c9a5fc7ca93c4666eaaa5fd623dcc6c1803a25b1c0fce30dcf Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.538259 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.588775 4863 generic.go:334] "Generic (PLEG): container finished" podID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerID="3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e" exitCode=0 Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.588853 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92kxn" event={"ID":"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5","Type":"ContainerDied","Data":"3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e"} Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.593284 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drxcz" event={"ID":"cb921038-e831-47ea-af78-e21e51079af7","Type":"ContainerStarted","Data":"7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3"} Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.595278 4863 generic.go:334] "Generic (PLEG): container finished" podID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerID="ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8" exitCode=0 Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.595344 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txjmw" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.595354 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txjmw" event={"ID":"27275f67-6423-48c7-ad3c-0999649ebf4e","Type":"ContainerDied","Data":"ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8"} Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.595394 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txjmw" event={"ID":"27275f67-6423-48c7-ad3c-0999649ebf4e","Type":"ContainerDied","Data":"49ba6200e59d41c718486b1b9dee65e7ab760eefaaf623b2e81db56fde2ead08"} Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.595411 4863 scope.go:117] "RemoveContainer" containerID="ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.598536 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f0f9656f-c10c-416a-998e-0a39b3641e55","Type":"ContainerStarted","Data":"b78a9a5ccde661c9a5fc7ca93c4666eaaa5fd623dcc6c1803a25b1c0fce30dcf"} Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.614498 4863 scope.go:117] "RemoveContainer" containerID="c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.650045 4863 scope.go:117] "RemoveContainer" containerID="d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.655623 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-drxcz" podStartSLOduration=1.844358398 podStartE2EDuration="46.655567197s" podCreationTimestamp="2025-12-05 06:48:56 +0000 UTC" firstStartedPulling="2025-12-05 06:48:57.197984961 +0000 UTC m=+164.923982001" lastFinishedPulling="2025-12-05 06:49:42.00919376 +0000 UTC m=+209.735190800" observedRunningTime="2025-12-05 06:49:42.654237504 +0000 UTC m=+210.380234544" watchObservedRunningTime="2025-12-05 06:49:42.655567197 +0000 UTC m=+210.381564237" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.668045 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-txjmw"] Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.671085 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-txjmw"] Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.674861 4863 scope.go:117] "RemoveContainer" containerID="ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8" Dec 05 06:49:42 crc kubenswrapper[4863]: E1205 06:49:42.675430 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8\": container with ID starting with ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8 not found: ID does not exist" containerID="ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.675526 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8"} err="failed to get container status \"ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8\": rpc error: code = NotFound 
desc = could not find container \"ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8\": container with ID starting with ba25b7dd5ad72a415ff66af9b8aa1c094559f11f4aa451cbb97169af6f1ffcf8 not found: ID does not exist" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.675635 4863 scope.go:117] "RemoveContainer" containerID="c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973" Dec 05 06:49:42 crc kubenswrapper[4863]: E1205 06:49:42.675928 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973\": container with ID starting with c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973 not found: ID does not exist" containerID="c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.676006 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973"} err="failed to get container status \"c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973\": rpc error: code = NotFound desc = could not find container \"c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973\": container with ID starting with c4353f02b7e36bcffbd292c78a4b1068080b85d0c222bb8289889676f4a40973 not found: ID does not exist" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.676071 4863 scope.go:117] "RemoveContainer" containerID="d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe" Dec 05 06:49:42 crc kubenswrapper[4863]: E1205 06:49:42.676923 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe\": container with ID starting with d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe not found: ID does not exist" containerID="d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe" Dec 05 06:49:42 crc kubenswrapper[4863]: I1205 06:49:42.677070 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe"} err="failed to get container status \"d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe\": rpc error: code = NotFound desc = could not find container \"d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe\": container with ID starting with d74eee2255488448675257f5e3827a36656a8f64f182ba2302c4d2f2ddd0b9fe not found: ID does not exist" Dec 05 06:49:43 crc kubenswrapper[4863]: I1205 06:49:43.606228 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f0f9656f-c10c-416a-998e-0a39b3641e55","Type":"ContainerStarted","Data":"76149f0d5c1d8c19ae90c9f0f2884027c2ee867b3f105eea01ec278f8c747cf7"} Dec 05 06:49:43 crc kubenswrapper[4863]: I1205 06:49:43.609023 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92kxn" event={"ID":"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5","Type":"ContainerStarted","Data":"6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c"} Dec 05 06:49:43 crc kubenswrapper[4863]: I1205 06:49:43.619821 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" 
podStartSLOduration=2.619801158 podStartE2EDuration="2.619801158s" podCreationTimestamp="2025-12-05 06:49:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:49:43.618516535 +0000 UTC m=+211.344513585" watchObservedRunningTime="2025-12-05 06:49:43.619801158 +0000 UTC m=+211.345798188" Dec 05 06:49:43 crc kubenswrapper[4863]: I1205 06:49:43.634493 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-92kxn" podStartSLOduration=1.883888641 podStartE2EDuration="47.634455672s" podCreationTimestamp="2025-12-05 06:48:56 +0000 UTC" firstStartedPulling="2025-12-05 06:48:57.223602388 +0000 UTC m=+164.949599428" lastFinishedPulling="2025-12-05 06:49:42.974169429 +0000 UTC m=+210.700166459" observedRunningTime="2025-12-05 06:49:43.632401979 +0000 UTC m=+211.358399019" watchObservedRunningTime="2025-12-05 06:49:43.634455672 +0000 UTC m=+211.360452712" Dec 05 06:49:44 crc kubenswrapper[4863]: I1205 06:49:44.613927 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" path="/var/lib/kubelet/pods/27275f67-6423-48c7-ad3c-0999649ebf4e/volumes" Dec 05 06:49:46 crc kubenswrapper[4863]: I1205 06:49:46.434340 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:49:46 crc kubenswrapper[4863]: I1205 06:49:46.434399 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:49:46 crc kubenswrapper[4863]: I1205 06:49:46.508951 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:49:46 crc kubenswrapper[4863]: I1205 06:49:46.655979 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:49:46 crc kubenswrapper[4863]: I1205 06:49:46.833738 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:49:46 crc kubenswrapper[4863]: I1205 06:49:46.833780 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:49:46 crc kubenswrapper[4863]: I1205 06:49:46.898305 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:49:48 crc kubenswrapper[4863]: I1205 06:49:48.654054 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhtdv" event={"ID":"53e9e031-086a-4945-ac5d-c9594f65072d","Type":"ContainerStarted","Data":"c54b3e417651f350eb943e5035a732af9866d3918f53db492520e7d10c77f3ee"} Dec 05 06:49:48 crc kubenswrapper[4863]: I1205 06:49:48.658102 4863 generic.go:334] "Generic (PLEG): container finished" podID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerID="ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644" exitCode=0 Dec 05 06:49:48 crc kubenswrapper[4863]: I1205 06:49:48.658206 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ngtb" event={"ID":"cc5afbb7-616a-44bd-83ce-c464c642b7c4","Type":"ContainerDied","Data":"ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644"} Dec 05 06:49:49 crc kubenswrapper[4863]: I1205 
06:49:49.661102 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:49:49 crc kubenswrapper[4863]: I1205 06:49:49.677022 4863 generic.go:334] "Generic (PLEG): container finished" podID="53e9e031-086a-4945-ac5d-c9594f65072d" containerID="c54b3e417651f350eb943e5035a732af9866d3918f53db492520e7d10c77f3ee" exitCode=0 Dec 05 06:49:49 crc kubenswrapper[4863]: I1205 06:49:49.677094 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhtdv" event={"ID":"53e9e031-086a-4945-ac5d-c9594f65072d","Type":"ContainerDied","Data":"c54b3e417651f350eb943e5035a732af9866d3918f53db492520e7d10c77f3ee"} Dec 05 06:49:49 crc kubenswrapper[4863]: I1205 06:49:49.681323 4863 generic.go:334] "Generic (PLEG): container finished" podID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerID="dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91" exitCode=0 Dec 05 06:49:49 crc kubenswrapper[4863]: I1205 06:49:49.681413 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7jp8g" event={"ID":"1b94ae89-c171-471a-bd34-12ed57a752b8","Type":"ContainerDied","Data":"dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91"} Dec 05 06:49:49 crc kubenswrapper[4863]: I1205 06:49:49.695693 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:49:52 crc kubenswrapper[4863]: I1205 06:49:52.703102 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7jp8g" event={"ID":"1b94ae89-c171-471a-bd34-12ed57a752b8","Type":"ContainerStarted","Data":"124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7"} Dec 05 06:49:52 crc kubenswrapper[4863]: I1205 06:49:52.705449 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhtdv" event={"ID":"53e9e031-086a-4945-ac5d-c9594f65072d","Type":"ContainerStarted","Data":"ba78471894551319427a629f6294389dcab18a2de510afaee0caf7065c73e1e5"} Dec 05 06:49:52 crc kubenswrapper[4863]: I1205 06:49:52.709019 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ngtb" event={"ID":"cc5afbb7-616a-44bd-83ce-c464c642b7c4","Type":"ContainerStarted","Data":"bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3"} Dec 05 06:49:52 crc kubenswrapper[4863]: I1205 06:49:52.724821 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7jp8g" podStartSLOduration=1.435645386 podStartE2EDuration="54.724798363s" podCreationTimestamp="2025-12-05 06:48:58 +0000 UTC" firstStartedPulling="2025-12-05 06:48:59.245048487 +0000 UTC m=+166.971045527" lastFinishedPulling="2025-12-05 06:49:52.534201464 +0000 UTC m=+220.260198504" observedRunningTime="2025-12-05 06:49:52.721835408 +0000 UTC m=+220.447832478" watchObservedRunningTime="2025-12-05 06:49:52.724798363 +0000 UTC m=+220.450795423" Dec 05 06:49:52 crc kubenswrapper[4863]: I1205 06:49:52.744212 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9ngtb" podStartSLOduration=2.520240318 podStartE2EDuration="56.744194558s" podCreationTimestamp="2025-12-05 06:48:56 +0000 UTC" firstStartedPulling="2025-12-05 06:48:58.235030287 +0000 UTC m=+165.961027327" lastFinishedPulling="2025-12-05 06:49:52.458984517 +0000 UTC m=+220.184981567" 
observedRunningTime="2025-12-05 06:49:52.743804478 +0000 UTC m=+220.469801528" watchObservedRunningTime="2025-12-05 06:49:52.744194558 +0000 UTC m=+220.470191608" Dec 05 06:49:56 crc kubenswrapper[4863]: I1205 06:49:56.498031 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:49:56 crc kubenswrapper[4863]: I1205 06:49:56.516921 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mhtdv" podStartSLOduration=6.346157433 podStartE2EDuration="57.516901551s" podCreationTimestamp="2025-12-05 06:48:59 +0000 UTC" firstStartedPulling="2025-12-05 06:49:01.296251363 +0000 UTC m=+169.022248403" lastFinishedPulling="2025-12-05 06:49:52.466995471 +0000 UTC m=+220.192992521" observedRunningTime="2025-12-05 06:49:52.766535677 +0000 UTC m=+220.492532747" watchObservedRunningTime="2025-12-05 06:49:56.516901551 +0000 UTC m=+224.242898601" Dec 05 06:49:56 crc kubenswrapper[4863]: I1205 06:49:56.884900 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:49:57 crc kubenswrapper[4863]: I1205 06:49:57.023657 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:49:57 crc kubenswrapper[4863]: I1205 06:49:57.023714 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:49:57 crc kubenswrapper[4863]: I1205 06:49:57.107825 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:49:57 crc kubenswrapper[4863]: I1205 06:49:57.781570 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:49:58 crc kubenswrapper[4863]: I1205 06:49:58.623707 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:49:58 crc kubenswrapper[4863]: I1205 06:49:58.624752 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:49:58 crc kubenswrapper[4863]: I1205 06:49:58.701719 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:49:58 crc kubenswrapper[4863]: I1205 06:49:58.833629 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.095800 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-92kxn"] Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.096408 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-92kxn" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="registry-server" containerID="cri-o://6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c" gracePeriod=2 Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.397797 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-l447b"] Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.531652 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.590404 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fr8h\" (UniqueName: \"kubernetes.io/projected/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-kube-api-access-6fr8h\") pod \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.590504 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-catalog-content\") pod \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.590597 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-utilities\") pod \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\" (UID: \"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5\") " Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.591576 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-utilities" (OuterVolumeSpecName: "utilities") pod "d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" (UID: "d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.600641 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-kube-api-access-6fr8h" (OuterVolumeSpecName: "kube-api-access-6fr8h") pod "d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" (UID: "d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5"). InnerVolumeSpecName "kube-api-access-6fr8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.638172 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" (UID: "d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.691667 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.691701 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fr8h\" (UniqueName: \"kubernetes.io/projected/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-kube-api-access-6fr8h\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.691712 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.770692 4863 generic.go:334] "Generic (PLEG): container finished" podID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerID="6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c" exitCode=0 Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.770771 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-92kxn" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.770774 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92kxn" event={"ID":"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5","Type":"ContainerDied","Data":"6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c"} Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.770878 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-92kxn" event={"ID":"d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5","Type":"ContainerDied","Data":"193ff1b36902efcd2b982485f6ba11ddd8b2db490a5e13d8152dfdddce8698db"} Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.770943 4863 scope.go:117] "RemoveContainer" containerID="6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.800455 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-92kxn"] Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.807011 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-92kxn"] Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.807139 4863 scope.go:117] "RemoveContainer" containerID="3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.831238 4863 scope.go:117] "RemoveContainer" containerID="0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.858094 4863 scope.go:117] "RemoveContainer" containerID="6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c" Dec 05 06:49:59 crc kubenswrapper[4863]: E1205 06:49:59.858765 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c\": container with ID starting with 6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c not found: ID does not exist" containerID="6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.858805 
4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c"} err="failed to get container status \"6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c\": rpc error: code = NotFound desc = could not find container \"6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c\": container with ID starting with 6f9703a40bf4ce4172e9e7b516ea2b01450b5e836dea6095049b18737e79248c not found: ID does not exist" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.858838 4863 scope.go:117] "RemoveContainer" containerID="3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e" Dec 05 06:49:59 crc kubenswrapper[4863]: E1205 06:49:59.859330 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e\": container with ID starting with 3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e not found: ID does not exist" containerID="3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.859399 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e"} err="failed to get container status \"3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e\": rpc error: code = NotFound desc = could not find container \"3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e\": container with ID starting with 3ec7f8a7a673dd3ca64ead24efa975e937bd5d995b85b282e286705d5376f84e not found: ID does not exist" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.859425 4863 scope.go:117] "RemoveContainer" containerID="0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3" Dec 05 06:49:59 crc kubenswrapper[4863]: E1205 06:49:59.859800 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3\": container with ID starting with 0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3 not found: ID does not exist" containerID="0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3" Dec 05 06:49:59 crc kubenswrapper[4863]: I1205 06:49:59.859838 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3"} err="failed to get container status \"0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3\": rpc error: code = NotFound desc = could not find container \"0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3\": container with ID starting with 0aa1008eb8a6f500bc3a0fa60ec3721d4046cfd51b6932ac588ac6956f4f6ca3 not found: ID does not exist" Dec 05 06:50:00 crc kubenswrapper[4863]: I1205 06:50:00.054635 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:50:00 crc kubenswrapper[4863]: I1205 06:50:00.054708 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:50:00 crc kubenswrapper[4863]: I1205 06:50:00.126014 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:50:00 crc kubenswrapper[4863]: I1205 06:50:00.609722 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" path="/var/lib/kubelet/pods/d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5/volumes" Dec 05 06:50:00 crc kubenswrapper[4863]: I1205 06:50:00.856748 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:50:00 crc kubenswrapper[4863]: I1205 06:50:00.889002 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ngtb"] Dec 05 06:50:00 crc kubenswrapper[4863]: I1205 06:50:00.889210 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9ngtb" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerName="registry-server" containerID="cri-o://bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3" gracePeriod=2 Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.327802 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.436105 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-utilities\") pod \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.436243 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb72v\" (UniqueName: \"kubernetes.io/projected/cc5afbb7-616a-44bd-83ce-c464c642b7c4-kube-api-access-mb72v\") pod \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.436291 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-catalog-content\") pod \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\" (UID: \"cc5afbb7-616a-44bd-83ce-c464c642b7c4\") " Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.437016 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-utilities" (OuterVolumeSpecName: "utilities") pod "cc5afbb7-616a-44bd-83ce-c464c642b7c4" (UID: "cc5afbb7-616a-44bd-83ce-c464c642b7c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.440525 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc5afbb7-616a-44bd-83ce-c464c642b7c4-kube-api-access-mb72v" (OuterVolumeSpecName: "kube-api-access-mb72v") pod "cc5afbb7-616a-44bd-83ce-c464c642b7c4" (UID: "cc5afbb7-616a-44bd-83ce-c464c642b7c4"). InnerVolumeSpecName "kube-api-access-mb72v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.493643 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc5afbb7-616a-44bd-83ce-c464c642b7c4" (UID: "cc5afbb7-616a-44bd-83ce-c464c642b7c4"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.537919 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.537989 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb72v\" (UniqueName: \"kubernetes.io/projected/cc5afbb7-616a-44bd-83ce-c464c642b7c4-kube-api-access-mb72v\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.538018 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc5afbb7-616a-44bd-83ce-c464c642b7c4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.832809 4863 generic.go:334] "Generic (PLEG): container finished" podID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerID="bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3" exitCode=0 Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.832871 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ngtb" event={"ID":"cc5afbb7-616a-44bd-83ce-c464c642b7c4","Type":"ContainerDied","Data":"bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3"} Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.832905 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ngtb" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.832933 4863 scope.go:117] "RemoveContainer" containerID="bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.832913 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ngtb" event={"ID":"cc5afbb7-616a-44bd-83ce-c464c642b7c4","Type":"ContainerDied","Data":"a254b09657785f241d068f6831f805ad4b2bb04249c483af31024adb94c8913d"} Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.857749 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ngtb"] Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.859544 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9ngtb"] Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.872421 4863 scope.go:117] "RemoveContainer" containerID="ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.899314 4863 scope.go:117] "RemoveContainer" containerID="ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.925076 4863 scope.go:117] "RemoveContainer" containerID="bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3" Dec 05 06:50:02 crc kubenswrapper[4863]: E1205 06:50:02.925635 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3\": container with ID starting with bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3 not found: ID does not exist" containerID="bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3" Dec 05 06:50:02 crc 
kubenswrapper[4863]: I1205 06:50:02.925689 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3"} err="failed to get container status \"bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3\": rpc error: code = NotFound desc = could not find container \"bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3\": container with ID starting with bf84a180a1d900ae1116cf3bfd7573a7410f712f7217bb058e80b079b236b2d3 not found: ID does not exist" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.925724 4863 scope.go:117] "RemoveContainer" containerID="ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644" Dec 05 06:50:02 crc kubenswrapper[4863]: E1205 06:50:02.926557 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644\": container with ID starting with ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644 not found: ID does not exist" containerID="ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.926610 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644"} err="failed to get container status \"ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644\": rpc error: code = NotFound desc = could not find container \"ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644\": container with ID starting with ed1bc4b6afccf872b2046292cde08e61b11a80d0f025816434672e15a8c04644 not found: ID does not exist" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.926647 4863 scope.go:117] "RemoveContainer" containerID="ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c" Dec 05 06:50:02 crc kubenswrapper[4863]: E1205 06:50:02.927109 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c\": container with ID starting with ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c not found: ID does not exist" containerID="ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c" Dec 05 06:50:02 crc kubenswrapper[4863]: I1205 06:50:02.927238 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c"} err="failed to get container status \"ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c\": rpc error: code = NotFound desc = could not find container \"ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c\": container with ID starting with ddaae0baf23bd480ae5d8e31f90dfc818d8d6a943c3f99ca25f3385afff8b40c not found: ID does not exist" Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.493767 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mhtdv"] Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.494029 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mhtdv" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="registry-server" 
containerID="cri-o://ba78471894551319427a629f6294389dcab18a2de510afaee0caf7065c73e1e5" gracePeriod=2 Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.838990 4863 generic.go:334] "Generic (PLEG): container finished" podID="53e9e031-086a-4945-ac5d-c9594f65072d" containerID="ba78471894551319427a629f6294389dcab18a2de510afaee0caf7065c73e1e5" exitCode=0 Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.839044 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhtdv" event={"ID":"53e9e031-086a-4945-ac5d-c9594f65072d","Type":"ContainerDied","Data":"ba78471894551319427a629f6294389dcab18a2de510afaee0caf7065c73e1e5"} Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.839088 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhtdv" event={"ID":"53e9e031-086a-4945-ac5d-c9594f65072d","Type":"ContainerDied","Data":"5bed903c14b8c602162d1f49569951e1d5db64ea2922b5da4d3d6591e7cd9680"} Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.839098 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bed903c14b8c602162d1f49569951e1d5db64ea2922b5da4d3d6591e7cd9680" Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.854260 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.953325 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-utilities\") pod \"53e9e031-086a-4945-ac5d-c9594f65072d\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.953556 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcp9l\" (UniqueName: \"kubernetes.io/projected/53e9e031-086a-4945-ac5d-c9594f65072d-kube-api-access-zcp9l\") pod \"53e9e031-086a-4945-ac5d-c9594f65072d\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.954095 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-utilities" (OuterVolumeSpecName: "utilities") pod "53e9e031-086a-4945-ac5d-c9594f65072d" (UID: "53e9e031-086a-4945-ac5d-c9594f65072d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.953594 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-catalog-content\") pod \"53e9e031-086a-4945-ac5d-c9594f65072d\" (UID: \"53e9e031-086a-4945-ac5d-c9594f65072d\") " Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.954536 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:03 crc kubenswrapper[4863]: I1205 06:50:03.959350 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53e9e031-086a-4945-ac5d-c9594f65072d-kube-api-access-zcp9l" (OuterVolumeSpecName: "kube-api-access-zcp9l") pod "53e9e031-086a-4945-ac5d-c9594f65072d" (UID: "53e9e031-086a-4945-ac5d-c9594f65072d"). 
InnerVolumeSpecName "kube-api-access-zcp9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:50:04 crc kubenswrapper[4863]: I1205 06:50:04.055633 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcp9l\" (UniqueName: \"kubernetes.io/projected/53e9e031-086a-4945-ac5d-c9594f65072d-kube-api-access-zcp9l\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:04 crc kubenswrapper[4863]: I1205 06:50:04.062341 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53e9e031-086a-4945-ac5d-c9594f65072d" (UID: "53e9e031-086a-4945-ac5d-c9594f65072d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:50:04 crc kubenswrapper[4863]: I1205 06:50:04.156368 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e9e031-086a-4945-ac5d-c9594f65072d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:04 crc kubenswrapper[4863]: I1205 06:50:04.610527 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" path="/var/lib/kubelet/pods/cc5afbb7-616a-44bd-83ce-c464c642b7c4/volumes" Dec 05 06:50:04 crc kubenswrapper[4863]: I1205 06:50:04.847920 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mhtdv" Dec 05 06:50:04 crc kubenswrapper[4863]: I1205 06:50:04.871610 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mhtdv"] Dec 05 06:50:04 crc kubenswrapper[4863]: I1205 06:50:04.876765 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mhtdv"] Dec 05 06:50:06 crc kubenswrapper[4863]: I1205 06:50:06.611818 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" path="/var/lib/kubelet/pods/53e9e031-086a-4945-ac5d-c9594f65072d/volumes" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.681826 4863 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.683617 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.683723 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.683894 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.683981 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.684076 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="extract-content" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.684161 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="extract-content" Dec 05 06:50:20 crc 
kubenswrapper[4863]: E1205 06:50:20.684244 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="extract-content" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.684325 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="extract-content" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.684413 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.684520 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.684613 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.684698 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.684780 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.684865 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.684952 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="extract-content" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.685035 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="extract-content" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.685111 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.685184 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.685311 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.685396 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="extract-utilities" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.685495 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.685586 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.685675 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerName="extract-content" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.685755 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" 
containerName="extract-content" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.685966 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d88f5ff1-54cd-4fc8-8d1c-a43cc2ed40d5" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.686060 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="27275f67-6423-48c7-ad3c-0999649ebf4e" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.686159 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="53e9e031-086a-4945-ac5d-c9594f65072d" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.686246 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc5afbb7-616a-44bd-83ce-c464c642b7c4" containerName="registry-server" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.686923 4863 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687046 4863 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687046 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.687375 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687467 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687543 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886" gracePeriod=15 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687576 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc" gracePeriod=15 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687643 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b" gracePeriod=15 Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.687583 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687749 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.687787 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-syncer" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687800 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.687821 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687786 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e" gracePeriod=15 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687667 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8" gracePeriod=15 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.687886 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.687991 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688011 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.688028 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688041 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688354 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688380 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688410 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688428 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688445 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.688700 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688730 4863 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.688945 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.696131 4863 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 05 06:50:20 crc kubenswrapper[4863]: E1205 06:50:20.780700 4863 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.106:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836222 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836266 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836296 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836385 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836421 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836441 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836548 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.836583 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937617 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937671 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937704 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937722 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937745 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937773 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937798 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937812 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937873 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937907 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937927 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937950 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937970 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.937988 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.938005 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.938025 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.946747 4863 generic.go:334] "Generic (PLEG): container finished" podID="f0f9656f-c10c-416a-998e-0a39b3641e55" containerID="76149f0d5c1d8c19ae90c9f0f2884027c2ee867b3f105eea01ec278f8c747cf7" exitCode=0 Dec 05 06:50:20 crc 
kubenswrapper[4863]: I1205 06:50:20.946803 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f0f9656f-c10c-416a-998e-0a39b3641e55","Type":"ContainerDied","Data":"76149f0d5c1d8c19ae90c9f0f2884027c2ee867b3f105eea01ec278f8c747cf7"} Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.947556 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.950994 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.953071 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.953952 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc" exitCode=0 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.953989 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8" exitCode=0 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.954002 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b" exitCode=0 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.954012 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e" exitCode=2 Dec 05 06:50:20 crc kubenswrapper[4863]: I1205 06:50:20.954032 4863 scope.go:117] "RemoveContainer" containerID="b6a09cae7ab61935181b29864497480467e8cf7e13a644bcba490b7290bb5e68" Dec 05 06:50:21 crc kubenswrapper[4863]: I1205 06:50:21.082186 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:21 crc kubenswrapper[4863]: E1205 06:50:21.114925 4863 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.106:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e3f0b276a857b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 06:50:21.114295675 +0000 UTC m=+248.840292755,LastTimestamp:2025-12-05 06:50:21.114295675 +0000 UTC m=+248.840292755,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 06:50:21 crc kubenswrapper[4863]: E1205 06:50:21.669203 4863 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.106:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" volumeName="registry-storage" Dec 05 06:50:21 crc kubenswrapper[4863]: I1205 06:50:21.964045 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"d5cde533f538eb2beedf4108efc458d8d9e76d0d964b37c1d1bcc33ef36cd4a9"} Dec 05 06:50:21 crc kubenswrapper[4863]: I1205 06:50:21.964096 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"492d665e53c8555338d990b9bd5b1f86e9055003226cd555ff06af8f5354ff21"} Dec 05 06:50:21 crc kubenswrapper[4863]: E1205 06:50:21.964747 4863 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.106:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:50:21 crc kubenswrapper[4863]: I1205 06:50:21.964748 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:21 crc kubenswrapper[4863]: I1205 06:50:21.967686 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.304075 4863 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.305042 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.456335 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-kubelet-dir\") pod \"f0f9656f-c10c-416a-998e-0a39b3641e55\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.456426 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0f9656f-c10c-416a-998e-0a39b3641e55-kube-api-access\") pod \"f0f9656f-c10c-416a-998e-0a39b3641e55\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.456457 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-var-lock\") pod \"f0f9656f-c10c-416a-998e-0a39b3641e55\" (UID: \"f0f9656f-c10c-416a-998e-0a39b3641e55\") " Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.456457 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f0f9656f-c10c-416a-998e-0a39b3641e55" (UID: "f0f9656f-c10c-416a-998e-0a39b3641e55"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.456699 4863 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.456705 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-var-lock" (OuterVolumeSpecName: "var-lock") pod "f0f9656f-c10c-416a-998e-0a39b3641e55" (UID: "f0f9656f-c10c-416a-998e-0a39b3641e55"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.463049 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0f9656f-c10c-416a-998e-0a39b3641e55-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f0f9656f-c10c-416a-998e-0a39b3641e55" (UID: "f0f9656f-c10c-416a-998e-0a39b3641e55"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.558276 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0f9656f-c10c-416a-998e-0a39b3641e55-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.558755 4863 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f0f9656f-c10c-416a-998e-0a39b3641e55-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.608243 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.974969 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f0f9656f-c10c-416a-998e-0a39b3641e55","Type":"ContainerDied","Data":"b78a9a5ccde661c9a5fc7ca93c4666eaaa5fd623dcc6c1803a25b1c0fce30dcf"} Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.975011 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b78a9a5ccde661c9a5fc7ca93c4666eaaa5fd623dcc6c1803a25b1c0fce30dcf" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.975023 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 06:50:22 crc kubenswrapper[4863]: I1205 06:50:22.979383 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.059462 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.060738 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.061621 4863 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.062309 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.165900 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.165979 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.166067 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.166084 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.166176 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.166222 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.166509 4863 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.166533 4863 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.166549 4863 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.985510 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.986020 4863 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886" exitCode=0 Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.986069 4863 scope.go:117] "RemoveContainer" containerID="f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc" Dec 05 06:50:23 crc kubenswrapper[4863]: I1205 06:50:23.986154 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.008738 4863 scope.go:117] "RemoveContainer" containerID="b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.009057 4863 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.009776 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.030647 4863 scope.go:117] "RemoveContainer" containerID="22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.061685 4863 scope.go:117] "RemoveContainer" containerID="b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.077701 4863 scope.go:117] "RemoveContainer" containerID="a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.096955 4863 scope.go:117] "RemoveContainer" containerID="542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.138066 4863 scope.go:117] "RemoveContainer" containerID="f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc" Dec 05 06:50:24 crc 
kubenswrapper[4863]: E1205 06:50:24.138593 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\": container with ID starting with f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc not found: ID does not exist" containerID="f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.138672 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc"} err="failed to get container status \"f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\": rpc error: code = NotFound desc = could not find container \"f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc\": container with ID starting with f686cd72d055aca859d6bd1d59afae5127ec910a8ea743b454dcb8e6a7f217cc not found: ID does not exist" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.138724 4863 scope.go:117] "RemoveContainer" containerID="b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8" Dec 05 06:50:24 crc kubenswrapper[4863]: E1205 06:50:24.139586 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\": container with ID starting with b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8 not found: ID does not exist" containerID="b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.139652 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8"} err="failed to get container status \"b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\": rpc error: code = NotFound desc = could not find container \"b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8\": container with ID starting with b48dee0b9bc0975fd720448dc2e8e9162d64a724c5b8fc0341dd5f3df53158c8 not found: ID does not exist" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.139693 4863 scope.go:117] "RemoveContainer" containerID="22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b" Dec 05 06:50:24 crc kubenswrapper[4863]: E1205 06:50:24.140125 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\": container with ID starting with 22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b not found: ID does not exist" containerID="22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.140191 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b"} err="failed to get container status \"22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\": rpc error: code = NotFound desc = could not find container \"22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b\": container with ID starting with 22306fe8f51b1ce7e2ee76356f30f799aa3e327570e7d3ebc24acd8b2396998b not found: ID does not exist" Dec 05 06:50:24 crc kubenswrapper[4863]: 
I1205 06:50:24.140237 4863 scope.go:117] "RemoveContainer" containerID="b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e" Dec 05 06:50:24 crc kubenswrapper[4863]: E1205 06:50:24.140711 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\": container with ID starting with b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e not found: ID does not exist" containerID="b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.140767 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e"} err="failed to get container status \"b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\": rpc error: code = NotFound desc = could not find container \"b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e\": container with ID starting with b994305cee72b369a9f3f85315bf82b6a4292e162df35d069ee14853c897af9e not found: ID does not exist" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.140807 4863 scope.go:117] "RemoveContainer" containerID="a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886" Dec 05 06:50:24 crc kubenswrapper[4863]: E1205 06:50:24.141265 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\": container with ID starting with a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886 not found: ID does not exist" containerID="a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.141338 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886"} err="failed to get container status \"a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\": rpc error: code = NotFound desc = could not find container \"a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886\": container with ID starting with a971f0f47ebe1030cdfc8908f56a6c66497f3dc9a29aa778a3361743a4c88886 not found: ID does not exist" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.141377 4863 scope.go:117] "RemoveContainer" containerID="542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae" Dec 05 06:50:24 crc kubenswrapper[4863]: E1205 06:50:24.141865 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\": container with ID starting with 542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae not found: ID does not exist" containerID="542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.141911 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae"} err="failed to get container status \"542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\": rpc error: code = NotFound desc = could not find container \"542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae\": container 
with ID starting with 542b4843fadfa95841ea0d56021a684bf1c2a7e97bd90b961f3120192a31aeae not found: ID does not exist" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.446737 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" containerName="oauth-openshift" containerID="cri-o://93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e" gracePeriod=15 Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.617614 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.965030 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.965763 4863 status_manager.go:851] "Failed to get status for pod" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-l447b\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.966139 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.993139 4863 generic.go:334] "Generic (PLEG): container finished" podID="4db68aa4-711a-4795-ad76-64d3dc63e61b" containerID="93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e" exitCode=0 Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.993202 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" event={"ID":"4db68aa4-711a-4795-ad76-64d3dc63e61b","Type":"ContainerDied","Data":"93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e"} Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.993228 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.993258 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" event={"ID":"4db68aa4-711a-4795-ad76-64d3dc63e61b","Type":"ContainerDied","Data":"62299a32533407035ea54537961c229028f51d8136c78adb49b671795bf7e7af"} Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.993286 4863 scope.go:117] "RemoveContainer" containerID="93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.993962 4863 status_manager.go:851] "Failed to get status for pod" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-l447b\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:24 crc kubenswrapper[4863]: I1205 06:50:24.994160 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.014315 4863 scope.go:117] "RemoveContainer" containerID="93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.014882 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e\": container with ID starting with 93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e not found: ID does not exist" containerID="93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.014950 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e"} err="failed to get container status \"93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e\": rpc error: code = NotFound desc = could not find container \"93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e\": container with ID starting with 93a25738dea9d15b16dbd93dc53613e3350944f007db4963bb2ca3885d91c89e not found: ID does not exist" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.090733 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-dir\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091150 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzs84\" (UniqueName: \"kubernetes.io/projected/4db68aa4-711a-4795-ad76-64d3dc63e61b-kube-api-access-jzs84\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.090901 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091202 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-error\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091296 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-provider-selection\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091345 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-session\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091391 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-idp-0-file-data\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091443 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-service-ca\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091526 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-ocp-branding-template\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091577 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-cliconfig\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091623 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-policies\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091659 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-trusted-ca-bundle\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091690 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-serving-cert\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091769 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-router-certs\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.091806 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-login\") pod \"4db68aa4-711a-4795-ad76-64d3dc63e61b\" (UID: \"4db68aa4-711a-4795-ad76-64d3dc63e61b\") " Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.092810 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.092915 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.092967 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.093655 4863 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.093710 4863 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db68aa4-711a-4795-ad76-64d3dc63e61b-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.093741 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.093773 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.093823 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.099128 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.099962 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4db68aa4-711a-4795-ad76-64d3dc63e61b-kube-api-access-jzs84" (OuterVolumeSpecName: "kube-api-access-jzs84") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "kube-api-access-jzs84". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.104792 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.105140 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.105807 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.107034 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.107399 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.107786 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.107968 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4db68aa4-711a-4795-ad76-64d3dc63e61b" (UID: "4db68aa4-711a-4795-ad76-64d3dc63e61b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195031 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195086 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195107 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195130 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195149 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzs84\" (UniqueName: \"kubernetes.io/projected/4db68aa4-711a-4795-ad76-64d3dc63e61b-kube-api-access-jzs84\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195191 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195211 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195231 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195254 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.195274 4863 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db68aa4-711a-4795-ad76-64d3dc63e61b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.321714 4863 status_manager.go:851] "Failed to get status for pod" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-l447b\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.322590 
4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.503661 4863 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.504694 4863 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.505739 4863 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.506198 4863 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.506767 4863 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:25 crc kubenswrapper[4863]: I1205 06:50:25.506841 4863 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.507317 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="200ms" Dec 05 06:50:25 crc kubenswrapper[4863]: E1205 06:50:25.708782 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="400ms" Dec 05 06:50:26 crc kubenswrapper[4863]: E1205 06:50:26.109283 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="800ms" Dec 05 06:50:26 crc kubenswrapper[4863]: E1205 06:50:26.585562 4863 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.106:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e3f0b276a857b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 06:50:21.114295675 +0000 UTC m=+248.840292755,LastTimestamp:2025-12-05 06:50:21.114295675 +0000 UTC m=+248.840292755,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 06:50:26 crc kubenswrapper[4863]: E1205 06:50:26.909899 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="1.6s" Dec 05 06:50:28 crc kubenswrapper[4863]: E1205 06:50:28.511442 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="3.2s" Dec 05 06:50:31 crc kubenswrapper[4863]: I1205 06:50:31.602766 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:31 crc kubenswrapper[4863]: I1205 06:50:31.604242 4863 status_manager.go:851] "Failed to get status for pod" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-l447b\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:31 crc kubenswrapper[4863]: I1205 06:50:31.604933 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:31 crc kubenswrapper[4863]: I1205 06:50:31.626158 4863 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:31 crc kubenswrapper[4863]: I1205 06:50:31.626204 4863 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:31 crc kubenswrapper[4863]: E1205 06:50:31.626961 4863 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:31 crc kubenswrapper[4863]: I1205 06:50:31.627825 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:31 crc kubenswrapper[4863]: E1205 06:50:31.713884 4863 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.106:6443: connect: connection refused" interval="6.4s" Dec 05 06:50:32 crc kubenswrapper[4863]: I1205 06:50:32.042916 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"89393050050c9084d7fd892197b9f2077a40a3470e871323dcaeaa770134ceed"} Dec 05 06:50:32 crc kubenswrapper[4863]: I1205 06:50:32.611068 4863 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:32 crc kubenswrapper[4863]: I1205 06:50:32.611937 4863 status_manager.go:851] "Failed to get status for pod" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-l447b\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:32 crc kubenswrapper[4863]: I1205 06:50:32.612362 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:33 crc kubenswrapper[4863]: I1205 06:50:33.314626 4863 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="0c43df504771f4a206dec6009fc4e0a4e120aec9c2cf6bf45277d088ae835078" exitCode=0 Dec 05 06:50:33 crc kubenswrapper[4863]: I1205 06:50:33.314712 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"0c43df504771f4a206dec6009fc4e0a4e120aec9c2cf6bf45277d088ae835078"} Dec 05 06:50:33 crc kubenswrapper[4863]: I1205 06:50:33.315071 4863 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:33 crc kubenswrapper[4863]: I1205 06:50:33.315101 4863 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:33 crc kubenswrapper[4863]: E1205 06:50:33.315701 4863 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:33 crc kubenswrapper[4863]: I1205 06:50:33.315912 4863 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:33 crc kubenswrapper[4863]: I1205 06:50:33.316538 4863 status_manager.go:851] "Failed to get status for pod" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" pod="openshift-authentication/oauth-openshift-558db77b4-l447b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-l447b\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:33 crc kubenswrapper[4863]: I1205 06:50:33.317058 4863 status_manager.go:851] "Failed to get status for pod" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.106:6443: connect: connection refused" Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.324049 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.324332 4863 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18" exitCode=1 Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.324390 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18"} Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.324900 4863 scope.go:117] "RemoveContainer" containerID="beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18" Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.330371 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8c29e57f70736c6fd40270194cda3e33e0b660a0cb68f10d1b4a0680c7bb56e7"} Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.330624 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5cbb0196c31eda3c3f835d541552560623cb4c615caed9a903b26aafb86e0ebe"} Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.330716 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bcb150d6afa88126cba0950c35cacf56e092287d75245a60589a74a43b18734f"} Dec 05 06:50:34 crc kubenswrapper[4863]: I1205 06:50:34.844032 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:50:35 crc kubenswrapper[4863]: I1205 06:50:35.337770 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"40a69661df507daf9a30370379b7e180eb23143b8c2bc8bd7b5e88370b642be9"} Dec 05 06:50:35 crc kubenswrapper[4863]: I1205 06:50:35.337813 4863 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c57aadfdbef32ccc056618d8935ecf4eed758883c75e7d18baaeb15553060265"} Dec 05 06:50:35 crc kubenswrapper[4863]: I1205 06:50:35.339439 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:35 crc kubenswrapper[4863]: I1205 06:50:35.339694 4863 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:35 crc kubenswrapper[4863]: I1205 06:50:35.339722 4863 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:35 crc kubenswrapper[4863]: I1205 06:50:35.339768 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 06:50:35 crc kubenswrapper[4863]: I1205 06:50:35.339793 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fc626589ebddd6825e2bd9071978c75063283625650d385fdf6f2878b962b368"} Dec 05 06:50:36 crc kubenswrapper[4863]: I1205 06:50:36.628848 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:36 crc kubenswrapper[4863]: I1205 06:50:36.628951 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:36 crc kubenswrapper[4863]: I1205 06:50:36.637747 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:37 crc kubenswrapper[4863]: I1205 06:50:37.735444 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:50:37 crc kubenswrapper[4863]: I1205 06:50:37.735759 4863 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 06:50:37 crc kubenswrapper[4863]: I1205 06:50:37.735960 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 06:50:40 crc kubenswrapper[4863]: I1205 06:50:40.347295 4863 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfe08358-8522-47be-b000-513f63c5bfbf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcb150d6afa88126cba0950c35cacf56e092287d75245a60589a74a43b18734f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c29e57f70736c6fd40270194cda3e33e0b660a0cb68f10d1b4a0680c7bb56e7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbb0196c31eda3c3f835d541552560623cb4c615caed9a903b26aafb86e0ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a69661df507daf9a30370379b7e180eb23143b8c2bc8bd7b5e88370b642be9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57aadfdbef32ccc056618d8935ecf4eed758883c75e7d18baaeb15553060265\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T06:50:34Z\\\"}}}],\\\"phase\\\":\\\"Running\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": pods \"kube-apiserver-crc\" not found" Dec 05 06:50:40 crc kubenswrapper[4863]: I1205 06:50:40.347569 4863 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:40 crc kubenswrapper[4863]: I1205 06:50:40.377885 4863 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:40 crc kubenswrapper[4863]: I1205 06:50:40.377929 4863 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:40 crc kubenswrapper[4863]: I1205 06:50:40.382595 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:50:40 crc kubenswrapper[4863]: I1205 06:50:40.385600 4863 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7a713e63-1e00-4d8c-8d69-0ea93f0b4b71" Dec 05 06:50:41 crc kubenswrapper[4863]: I1205 06:50:41.384539 4863 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:41 crc kubenswrapper[4863]: I1205 06:50:41.384589 4863 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="dfe08358-8522-47be-b000-513f63c5bfbf" Dec 05 06:50:42 crc kubenswrapper[4863]: I1205 06:50:42.056149 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:50:42 crc kubenswrapper[4863]: I1205 06:50:42.635437 4863 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7a713e63-1e00-4d8c-8d69-0ea93f0b4b71" Dec 05 06:50:47 crc kubenswrapper[4863]: I1205 06:50:47.735254 4863 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 06:50:47 crc kubenswrapper[4863]: I1205 06:50:47.735808 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 06:50:50 
crc kubenswrapper[4863]: I1205 06:50:50.509955 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 06:50:50 crc kubenswrapper[4863]: I1205 06:50:50.528336 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 06:50:51 crc kubenswrapper[4863]: I1205 06:50:51.468337 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 06:50:51 crc kubenswrapper[4863]: I1205 06:50:51.799909 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 06:50:51 crc kubenswrapper[4863]: I1205 06:50:51.801314 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 06:50:52 crc kubenswrapper[4863]: I1205 06:50:52.053247 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 06:50:52 crc kubenswrapper[4863]: I1205 06:50:52.226967 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 06:50:52 crc kubenswrapper[4863]: I1205 06:50:52.408062 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 06:50:52 crc kubenswrapper[4863]: I1205 06:50:52.866977 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.127278 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.514253 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.531117 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.683183 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.699680 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.767011 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.797978 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 06:50:53 crc kubenswrapper[4863]: I1205 06:50:53.913252 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.017286 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.035312 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.077632 4863 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.354790 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.355385 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.461674 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.586741 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.783132 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.911239 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 06:50:54 crc kubenswrapper[4863]: I1205 06:50:54.992838 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.192527 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.199675 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.311639 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.372632 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.390835 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.420299 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.452970 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.509372 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.558188 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.652546 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.654112 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.688127 4863 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.756555 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.757519 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.861022 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.861992 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.920013 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 06:50:55 crc kubenswrapper[4863]: I1205 06:50:55.937728 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.012155 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.037878 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.103168 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.240980 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.361013 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.370812 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.439380 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.527089 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.531847 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.611819 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.618003 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.739996 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.793575 4863 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.841157 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 06:50:56 crc kubenswrapper[4863]: I1205 06:50:56.907667 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.145503 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.145529 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.296047 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.368413 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.401607 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.433455 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.522229 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.670105 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.725999 4863 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.736132 4863 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.736196 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.736249 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.736846 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"fc626589ebddd6825e2bd9071978c75063283625650d385fdf6f2878b962b368"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 
06:50:57.736958 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://fc626589ebddd6825e2bd9071978c75063283625650d385fdf6f2878b962b368" gracePeriod=30 Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.780791 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.807774 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.897589 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.911670 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 06:50:57 crc kubenswrapper[4863]: I1205 06:50:57.933444 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.078521 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.201986 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.262558 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.299815 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.315318 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.318260 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.346946 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.388795 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.395787 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.429828 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.440752 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.471369 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 06:50:58 
crc kubenswrapper[4863]: I1205 06:50:58.491286 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.494352 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.495808 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.641167 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.647074 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.954940 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 06:50:58 crc kubenswrapper[4863]: I1205 06:50:58.981406 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.001195 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.023986 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.082506 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.160380 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.210515 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.319783 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.459185 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.712013 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.726211 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.876406 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.955125 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 06:50:59 crc kubenswrapper[4863]: I1205 06:50:59.972342 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.162795 4863 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.185527 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.245889 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.290591 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.318433 4863 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.355957 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.369178 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.396284 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.453968 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.580801 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.581508 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.694002 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.694570 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.736653 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.756241 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.810968 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.876601 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.877430 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.891989 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.898732 4863 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 06:51:00 crc kubenswrapper[4863]: I1205 06:51:00.926812 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.012226 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.015249 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.088859 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.116615 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.149642 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.166062 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.201432 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.253710 4863 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.353169 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.369262 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.381778 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.421236 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.491137 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.626704 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.643575 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.752355 4863 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.756414 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 
06:51:01.765546 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 06:51:01 crc kubenswrapper[4863]: I1205 06:51:01.933147 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.009614 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.034452 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.049844 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.072066 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.241605 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.254732 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.325532 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.425047 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.573839 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.623088 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.692533 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.733355 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.764065 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.796870 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.801952 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.835673 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 06:51:02 crc kubenswrapper[4863]: I1205 06:51:02.845793 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 06:51:02 crc 
kubenswrapper[4863]: I1205 06:51:02.855178 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.013805 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.056078 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.065783 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.110940 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.175328 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.176143 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.183910 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.231969 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.268385 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.386823 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.424422 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.526015 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.598205 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.608562 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.674233 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.706398 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.762166 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 06:51:03 crc kubenswrapper[4863]: I1205 06:51:03.811973 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 06:51:03 crc 
kubenswrapper[4863]: I1205 06:51:03.842006 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.013424 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.018698 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.072263 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.085562 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.086856 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.124313 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.125386 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.195880 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.254155 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.304856 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.334323 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.358002 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.370729 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.551619 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.580835 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.657357 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.760386 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.928695 4863 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.950527 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.976923 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 06:51:04 crc kubenswrapper[4863]: I1205 06:51:04.998840 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.463933 4863 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.472010 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-l447b","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.472100 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-6cc8b7fbff-kq964"] Dec 05 06:51:05 crc kubenswrapper[4863]: E1205 06:51:05.472587 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" containerName="oauth-openshift" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.472791 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" containerName="oauth-openshift" Dec 05 06:51:05 crc kubenswrapper[4863]: E1205 06:51:05.472832 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" containerName="installer" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.472839 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" containerName="installer" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.473061 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" containerName="oauth-openshift" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.473089 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0f9656f-c10c-416a-998e-0a39b3641e55" containerName="installer" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.473772 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.477410 4863 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.477672 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.479340 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.479555 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.481326 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.481787 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.481838 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.481945 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.482379 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.482449 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.482718 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.482861 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.482390 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.483384 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.484356 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.494604 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.494775 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.499412 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 
06:51:05.504440 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.534912 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=25.534819915 podStartE2EDuration="25.534819915s" podCreationTimestamp="2025-12-05 06:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:51:05.532753534 +0000 UTC m=+293.258750604" watchObservedRunningTime="2025-12-05 06:51:05.534819915 +0000 UTC m=+293.260816975" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.541995 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.564401 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.601962 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vhmh\" (UniqueName: \"kubernetes.io/projected/e6abf69b-dd3e-4df8-b825-42ebbd41a611-kube-api-access-4vhmh\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.602016 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-error\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.602049 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.602084 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-session\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.602510 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6abf69b-dd3e-4df8-b825-42ebbd41a611-audit-dir\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.602938 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-router-certs\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603022 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603088 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603200 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-service-ca\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603273 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603325 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-login\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603386 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-audit-policies\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603623 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 
06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.603679 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.631015 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705388 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-service-ca\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705533 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705602 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-login\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705699 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-audit-policies\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705742 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705777 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705819 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vhmh\" (UniqueName: \"kubernetes.io/projected/e6abf69b-dd3e-4df8-b825-42ebbd41a611-kube-api-access-4vhmh\") pod 
\"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705860 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-error\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705897 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705941 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-session\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.705994 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6abf69b-dd3e-4df8-b825-42ebbd41a611-audit-dir\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.706123 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-router-certs\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.706255 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.706305 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.706397 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.707457 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.708263 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6abf69b-dd3e-4df8-b825-42ebbd41a611-audit-dir\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.709409 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-audit-policies\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.710882 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.714777 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.714821 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-login\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.721546 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.722995 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " 
pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.725229 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-router-certs\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.725794 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.727120 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.729036 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.729059 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-system-session\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.729909 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6abf69b-dd3e-4df8-b825-42ebbd41a611-v4-0-config-user-template-error\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.734458 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vhmh\" (UniqueName: \"kubernetes.io/projected/e6abf69b-dd3e-4df8-b825-42ebbd41a611-kube-api-access-4vhmh\") pod \"oauth-openshift-6cc8b7fbff-kq964\" (UID: \"e6abf69b-dd3e-4df8-b825-42ebbd41a611\") " pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.766456 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.794965 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.847661 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.888773 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 06:51:05 crc kubenswrapper[4863]: I1205 06:51:05.907097 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.054450 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6cc8b7fbff-kq964"] Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.095546 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.225032 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.233428 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.270263 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.477434 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.501669 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.502360 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.561000 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" event={"ID":"e6abf69b-dd3e-4df8-b825-42ebbd41a611","Type":"ContainerStarted","Data":"e42abc09c950db2d0e233026f1aee6c45f4a8e34b18dbf6d2886e0495b548ee9"} Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.561043 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" event={"ID":"e6abf69b-dd3e-4df8-b825-42ebbd41a611","Type":"ContainerStarted","Data":"29981e31e211d6446f74a58012d3e3a8f75d80aab3218a8e67f3cd21d48cb661"} Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.562081 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.587237 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.589774 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" podStartSLOduration=67.589751517 podStartE2EDuration="1m7.589751517s" podCreationTimestamp="2025-12-05 06:49:59 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:51:06.587634345 +0000 UTC m=+294.313631395" watchObservedRunningTime="2025-12-05 06:51:06.589751517 +0000 UTC m=+294.315748557" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.611185 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4db68aa4-711a-4795-ad76-64d3dc63e61b" path="/var/lib/kubelet/pods/4db68aa4-711a-4795-ad76-64d3dc63e61b/volumes" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.773970 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.872604 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.934653 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.942772 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6cc8b7fbff-kq964" Dec 05 06:51:06 crc kubenswrapper[4863]: I1205 06:51:06.949355 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 06:51:07 crc kubenswrapper[4863]: I1205 06:51:07.010327 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 06:51:07 crc kubenswrapper[4863]: I1205 06:51:07.285742 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 06:51:07 crc kubenswrapper[4863]: I1205 06:51:07.663503 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 06:51:07 crc kubenswrapper[4863]: I1205 06:51:07.743062 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 06:51:07 crc kubenswrapper[4863]: I1205 06:51:07.861985 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 06:51:07 crc kubenswrapper[4863]: I1205 06:51:07.902757 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 06:51:08 crc kubenswrapper[4863]: I1205 06:51:08.034226 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 06:51:08 crc kubenswrapper[4863]: I1205 06:51:08.718290 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 06:51:10 crc kubenswrapper[4863]: I1205 06:51:10.036421 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 06:51:14 crc kubenswrapper[4863]: I1205 06:51:14.247322 4863 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 06:51:14 crc kubenswrapper[4863]: I1205 06:51:14.248215 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://d5cde533f538eb2beedf4108efc458d8d9e76d0d964b37c1d1bcc33ef36cd4a9" gracePeriod=5 Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.661362 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.662179 4863 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="d5cde533f538eb2beedf4108efc458d8d9e76d0d964b37c1d1bcc33ef36cd4a9" exitCode=137 Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.855582 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.855704 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.911439 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.911553 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.911588 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.911671 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.911778 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.913059 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.913115 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). 
InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.913107 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.913137 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:51:19 crc kubenswrapper[4863]: I1205 06:51:19.923427 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.013814 4863 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.013889 4863 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.013906 4863 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.013921 4863 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.013935 4863 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.615205 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.673294 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.673401 4863 scope.go:117] "RemoveContainer" containerID="d5cde533f538eb2beedf4108efc458d8d9e76d0d964b37c1d1bcc33ef36cd4a9" Dec 05 06:51:20 crc kubenswrapper[4863]: I1205 06:51:20.673499 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 06:51:28 crc kubenswrapper[4863]: I1205 06:51:28.735728 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 06:51:28 crc kubenswrapper[4863]: I1205 06:51:28.739269 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 06:51:28 crc kubenswrapper[4863]: I1205 06:51:28.739356 4863 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="fc626589ebddd6825e2bd9071978c75063283625650d385fdf6f2878b962b368" exitCode=137 Dec 05 06:51:28 crc kubenswrapper[4863]: I1205 06:51:28.739400 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"fc626589ebddd6825e2bd9071978c75063283625650d385fdf6f2878b962b368"} Dec 05 06:51:28 crc kubenswrapper[4863]: I1205 06:51:28.739439 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"446625d9055dde346dbf165425ea1d640ec91795a7e3e8dd665df18e960fcdef"} Dec 05 06:51:28 crc kubenswrapper[4863]: I1205 06:51:28.739466 4863 scope.go:117] "RemoveContainer" containerID="beb7a4f62a7462d1c404720e710b580c0d6e58296ab9c890e90a4f7804eeac18" Dec 05 06:51:29 crc kubenswrapper[4863]: I1205 06:51:29.750989 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 06:51:32 crc kubenswrapper[4863]: I1205 06:51:32.056724 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:51:37 crc kubenswrapper[4863]: I1205 06:51:37.735026 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:51:37 crc kubenswrapper[4863]: I1205 06:51:37.743940 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:51:42 crc kubenswrapper[4863]: I1205 06:51:42.062019 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.050188 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xq987"] Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.050850 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" podUID="494f22d4-5ac0-4975-86fa-86cc2b1b3306" containerName="controller-manager" containerID="cri-o://bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa" gracePeriod=30 Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.053949 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb"] Dec 05 
06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.054154 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" podUID="979cb49e-89dd-4019-ad1c-bae78a50d877" containerName="route-controller-manager" containerID="cri-o://00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d" gracePeriod=30 Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.449730 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.506460 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599024 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-config\") pod \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599068 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvzsf\" (UniqueName: \"kubernetes.io/projected/494f22d4-5ac0-4975-86fa-86cc2b1b3306-kube-api-access-rvzsf\") pod \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599091 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-config\") pod \"979cb49e-89dd-4019-ad1c-bae78a50d877\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599154 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-client-ca\") pod \"979cb49e-89dd-4019-ad1c-bae78a50d877\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599184 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/979cb49e-89dd-4019-ad1c-bae78a50d877-serving-cert\") pod \"979cb49e-89dd-4019-ad1c-bae78a50d877\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599199 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-client-ca\") pod \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599219 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/494f22d4-5ac0-4975-86fa-86cc2b1b3306-serving-cert\") pod \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599237 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76nbm\" (UniqueName: \"kubernetes.io/projected/979cb49e-89dd-4019-ad1c-bae78a50d877-kube-api-access-76nbm\") pod 
\"979cb49e-89dd-4019-ad1c-bae78a50d877\" (UID: \"979cb49e-89dd-4019-ad1c-bae78a50d877\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.599259 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-proxy-ca-bundles\") pod \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\" (UID: \"494f22d4-5ac0-4975-86fa-86cc2b1b3306\") " Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.600043 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "494f22d4-5ac0-4975-86fa-86cc2b1b3306" (UID: "494f22d4-5ac0-4975-86fa-86cc2b1b3306"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.600202 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-client-ca" (OuterVolumeSpecName: "client-ca") pod "494f22d4-5ac0-4975-86fa-86cc2b1b3306" (UID: "494f22d4-5ac0-4975-86fa-86cc2b1b3306"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.600378 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-client-ca" (OuterVolumeSpecName: "client-ca") pod "979cb49e-89dd-4019-ad1c-bae78a50d877" (UID: "979cb49e-89dd-4019-ad1c-bae78a50d877"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.600417 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-config" (OuterVolumeSpecName: "config") pod "494f22d4-5ac0-4975-86fa-86cc2b1b3306" (UID: "494f22d4-5ac0-4975-86fa-86cc2b1b3306"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.600532 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-config" (OuterVolumeSpecName: "config") pod "979cb49e-89dd-4019-ad1c-bae78a50d877" (UID: "979cb49e-89dd-4019-ad1c-bae78a50d877"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.604947 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/494f22d4-5ac0-4975-86fa-86cc2b1b3306-kube-api-access-rvzsf" (OuterVolumeSpecName: "kube-api-access-rvzsf") pod "494f22d4-5ac0-4975-86fa-86cc2b1b3306" (UID: "494f22d4-5ac0-4975-86fa-86cc2b1b3306"). InnerVolumeSpecName "kube-api-access-rvzsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.605044 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/494f22d4-5ac0-4975-86fa-86cc2b1b3306-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "494f22d4-5ac0-4975-86fa-86cc2b1b3306" (UID: "494f22d4-5ac0-4975-86fa-86cc2b1b3306"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.605557 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/979cb49e-89dd-4019-ad1c-bae78a50d877-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "979cb49e-89dd-4019-ad1c-bae78a50d877" (UID: "979cb49e-89dd-4019-ad1c-bae78a50d877"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.606931 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/979cb49e-89dd-4019-ad1c-bae78a50d877-kube-api-access-76nbm" (OuterVolumeSpecName: "kube-api-access-76nbm") pod "979cb49e-89dd-4019-ad1c-bae78a50d877" (UID: "979cb49e-89dd-4019-ad1c-bae78a50d877"). InnerVolumeSpecName "kube-api-access-76nbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.700981 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/494f22d4-5ac0-4975-86fa-86cc2b1b3306-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701016 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76nbm\" (UniqueName: \"kubernetes.io/projected/979cb49e-89dd-4019-ad1c-bae78a50d877-kube-api-access-76nbm\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701027 4863 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701036 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701046 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvzsf\" (UniqueName: \"kubernetes.io/projected/494f22d4-5ac0-4975-86fa-86cc2b1b3306-kube-api-access-rvzsf\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701055 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701063 4863 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/979cb49e-89dd-4019-ad1c-bae78a50d877-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701071 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/979cb49e-89dd-4019-ad1c-bae78a50d877-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.701078 4863 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/494f22d4-5ac0-4975-86fa-86cc2b1b3306-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.874122 4863 generic.go:334] "Generic (PLEG): container finished" podID="979cb49e-89dd-4019-ad1c-bae78a50d877" 
containerID="00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d" exitCode=0 Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.874204 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" event={"ID":"979cb49e-89dd-4019-ad1c-bae78a50d877","Type":"ContainerDied","Data":"00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d"} Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.874238 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" event={"ID":"979cb49e-89dd-4019-ad1c-bae78a50d877","Type":"ContainerDied","Data":"6eca68745f4c1fb76146187955540470daeea89cb7cc7a79cd321359b334d96c"} Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.874263 4863 scope.go:117] "RemoveContainer" containerID="00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.874291 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.877096 4863 generic.go:334] "Generic (PLEG): container finished" podID="494f22d4-5ac0-4975-86fa-86cc2b1b3306" containerID="bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa" exitCode=0 Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.877161 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" event={"ID":"494f22d4-5ac0-4975-86fa-86cc2b1b3306","Type":"ContainerDied","Data":"bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa"} Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.877202 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" event={"ID":"494f22d4-5ac0-4975-86fa-86cc2b1b3306","Type":"ContainerDied","Data":"f6011392965391cb34bfa3d04af604d173fac70956b361303d7203371eae832e"} Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.877277 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xq987" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.906208 4863 scope.go:117] "RemoveContainer" containerID="00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d" Dec 05 06:51:47 crc kubenswrapper[4863]: E1205 06:51:47.906713 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d\": container with ID starting with 00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d not found: ID does not exist" containerID="00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.906748 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d"} err="failed to get container status \"00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d\": rpc error: code = NotFound desc = could not find container \"00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d\": container with ID starting with 00e85106c15b77ff60d914c75917fed2ee08ca1dc23a06aaafd5ee6aba3cdf6d not found: ID does not exist" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.906771 4863 scope.go:117] "RemoveContainer" containerID="bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.923506 4863 scope.go:117] "RemoveContainer" containerID="bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa" Dec 05 06:51:47 crc kubenswrapper[4863]: E1205 06:51:47.923964 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa\": container with ID starting with bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa not found: ID does not exist" containerID="bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.924044 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa"} err="failed to get container status \"bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa\": rpc error: code = NotFound desc = could not find container \"bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa\": container with ID starting with bf922041038f4b3a172e30e74ad0636f80f4b9b433324fe9d938a6c3998bfffa not found: ID does not exist" Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.924947 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xq987"] Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.931497 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xq987"] Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.936506 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb"] Dec 05 06:51:47 crc kubenswrapper[4863]: I1205 06:51:47.940557 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6rxcb"] Dec 05 06:51:48 crc 
kubenswrapper[4863]: I1205 06:51:48.119288 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg"] Dec 05 06:51:48 crc kubenswrapper[4863]: E1205 06:51:48.119616 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.119637 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 06:51:48 crc kubenswrapper[4863]: E1205 06:51:48.119676 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="494f22d4-5ac0-4975-86fa-86cc2b1b3306" containerName="controller-manager" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.119689 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="494f22d4-5ac0-4975-86fa-86cc2b1b3306" containerName="controller-manager" Dec 05 06:51:48 crc kubenswrapper[4863]: E1205 06:51:48.119712 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="979cb49e-89dd-4019-ad1c-bae78a50d877" containerName="route-controller-manager" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.119726 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="979cb49e-89dd-4019-ad1c-bae78a50d877" containerName="route-controller-manager" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.119876 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="979cb49e-89dd-4019-ad1c-bae78a50d877" containerName="route-controller-manager" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.119903 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.119918 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="494f22d4-5ac0-4975-86fa-86cc2b1b3306" containerName="controller-manager" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.120528 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.123680 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.123944 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.124675 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.126783 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.127256 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.127448 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.136609 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.148465 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg"] Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.207726 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjrjf\" (UniqueName: \"kubernetes.io/projected/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-kube-api-access-zjrjf\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.207795 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-proxy-ca-bundles\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.207825 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-client-ca\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.207853 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-serving-cert\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.207922 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-config\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.309389 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-serving-cert\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.309517 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-config\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.309548 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjrjf\" (UniqueName: \"kubernetes.io/projected/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-kube-api-access-zjrjf\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.309586 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-proxy-ca-bundles\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.309608 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-client-ca\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.311190 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-client-ca\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.312037 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-proxy-ca-bundles\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.312339 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-config\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" 
Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.321653 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-serving-cert\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.326225 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjrjf\" (UniqueName: \"kubernetes.io/projected/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-kube-api-access-zjrjf\") pod \"controller-manager-6b7f7d5cdc-sn4qg\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.436530 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.611025 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="494f22d4-5ac0-4975-86fa-86cc2b1b3306" path="/var/lib/kubelet/pods/494f22d4-5ac0-4975-86fa-86cc2b1b3306/volumes" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.612085 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="979cb49e-89dd-4019-ad1c-bae78a50d877" path="/var/lib/kubelet/pods/979cb49e-89dd-4019-ad1c-bae78a50d877/volumes" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.670559 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg"] Dec 05 06:51:48 crc kubenswrapper[4863]: W1205 06:51:48.684786 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8d7f1a0_2e43_4de6_9f5e_493113431cfc.slice/crio-544492d49aec69fbdf990f80cc9f901d68646c761260c764059e2a23b7488211 WatchSource:0}: Error finding container 544492d49aec69fbdf990f80cc9f901d68646c761260c764059e2a23b7488211: Status 404 returned error can't find the container with id 544492d49aec69fbdf990f80cc9f901d68646c761260c764059e2a23b7488211 Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.886402 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" event={"ID":"c8d7f1a0-2e43-4de6-9f5e-493113431cfc","Type":"ContainerStarted","Data":"5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42"} Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.886695 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" event={"ID":"c8d7f1a0-2e43-4de6-9f5e-493113431cfc","Type":"ContainerStarted","Data":"544492d49aec69fbdf990f80cc9f901d68646c761260c764059e2a23b7488211"} Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.886816 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.891421 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:51:48 crc kubenswrapper[4863]: I1205 06:51:48.936048 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" podStartSLOduration=1.936023934 podStartE2EDuration="1.936023934s" podCreationTimestamp="2025-12-05 06:51:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:51:48.911454719 +0000 UTC m=+336.637451809" watchObservedRunningTime="2025-12-05 06:51:48.936023934 +0000 UTC m=+336.662020994" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.110726 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6"] Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.111464 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.113348 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.113767 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.113788 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.113822 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.113988 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.114633 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.120142 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6"] Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.222299 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2148731-195c-4487-a656-94f18bee3c64-client-ca\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.222348 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24wcd\" (UniqueName: \"kubernetes.io/projected/e2148731-195c-4487-a656-94f18bee3c64-kube-api-access-24wcd\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.222652 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2148731-195c-4487-a656-94f18bee3c64-serving-cert\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " 
pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.222710 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2148731-195c-4487-a656-94f18bee3c64-config\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.323878 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2148731-195c-4487-a656-94f18bee3c64-serving-cert\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.323942 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2148731-195c-4487-a656-94f18bee3c64-config\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.323988 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2148731-195c-4487-a656-94f18bee3c64-client-ca\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.324022 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24wcd\" (UniqueName: \"kubernetes.io/projected/e2148731-195c-4487-a656-94f18bee3c64-kube-api-access-24wcd\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.324970 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2148731-195c-4487-a656-94f18bee3c64-client-ca\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.325126 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2148731-195c-4487-a656-94f18bee3c64-config\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.329662 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2148731-195c-4487-a656-94f18bee3c64-serving-cert\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc 
kubenswrapper[4863]: I1205 06:51:49.344675 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24wcd\" (UniqueName: \"kubernetes.io/projected/e2148731-195c-4487-a656-94f18bee3c64-kube-api-access-24wcd\") pod \"route-controller-manager-7cd55579f4-bcnr6\" (UID: \"e2148731-195c-4487-a656-94f18bee3c64\") " pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.424413 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.674699 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6"] Dec 05 06:51:49 crc kubenswrapper[4863]: W1205 06:51:49.683654 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2148731_195c_4487_a656_94f18bee3c64.slice/crio-4a02cbe70a45b8f6bf7a8b7b6fa6323ee2420163cf2587ea5d6f8ca621d458f6 WatchSource:0}: Error finding container 4a02cbe70a45b8f6bf7a8b7b6fa6323ee2420163cf2587ea5d6f8ca621d458f6: Status 404 returned error can't find the container with id 4a02cbe70a45b8f6bf7a8b7b6fa6323ee2420163cf2587ea5d6f8ca621d458f6 Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.897630 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" event={"ID":"e2148731-195c-4487-a656-94f18bee3c64","Type":"ContainerStarted","Data":"ddc91a32afd3da882656963e83e5faeee8ac1db6a25f7c8221d4dd65ff8d6c59"} Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.897676 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" event={"ID":"e2148731-195c-4487-a656-94f18bee3c64","Type":"ContainerStarted","Data":"4a02cbe70a45b8f6bf7a8b7b6fa6323ee2420163cf2587ea5d6f8ca621d458f6"} Dec 05 06:51:49 crc kubenswrapper[4863]: I1205 06:51:49.914313 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" podStartSLOduration=2.914290206 podStartE2EDuration="2.914290206s" podCreationTimestamp="2025-12-05 06:51:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:51:49.911796812 +0000 UTC m=+337.637793852" watchObservedRunningTime="2025-12-05 06:51:49.914290206 +0000 UTC m=+337.640287246" Dec 05 06:51:50 crc kubenswrapper[4863]: I1205 06:51:50.904131 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:51:50 crc kubenswrapper[4863]: I1205 06:51:50.913141 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7cd55579f4-bcnr6" Dec 05 06:52:08 crc kubenswrapper[4863]: I1205 06:52:08.464647 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:52:08 crc kubenswrapper[4863]: I1205 06:52:08.465462 
4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:52:27 crc kubenswrapper[4863]: I1205 06:52:27.391846 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg"] Dec 05 06:52:27 crc kubenswrapper[4863]: I1205 06:52:27.393205 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" podUID="c8d7f1a0-2e43-4de6-9f5e-493113431cfc" containerName="controller-manager" containerID="cri-o://5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42" gracePeriod=30 Dec 05 06:52:27 crc kubenswrapper[4863]: I1205 06:52:27.865437 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.040507 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjrjf\" (UniqueName: \"kubernetes.io/projected/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-kube-api-access-zjrjf\") pod \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.040966 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-client-ca\") pod \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.041944 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-client-ca" (OuterVolumeSpecName: "client-ca") pod "c8d7f1a0-2e43-4de6-9f5e-493113431cfc" (UID: "c8d7f1a0-2e43-4de6-9f5e-493113431cfc"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.042080 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-serving-cert\") pod \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.042669 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-proxy-ca-bundles\") pod \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.042770 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-config\") pod \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\" (UID: \"c8d7f1a0-2e43-4de6-9f5e-493113431cfc\") " Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.043208 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c8d7f1a0-2e43-4de6-9f5e-493113431cfc" (UID: "c8d7f1a0-2e43-4de6-9f5e-493113431cfc"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.043382 4863 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.043414 4863 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.043438 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-config" (OuterVolumeSpecName: "config") pod "c8d7f1a0-2e43-4de6-9f5e-493113431cfc" (UID: "c8d7f1a0-2e43-4de6-9f5e-493113431cfc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.048941 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-kube-api-access-zjrjf" (OuterVolumeSpecName: "kube-api-access-zjrjf") pod "c8d7f1a0-2e43-4de6-9f5e-493113431cfc" (UID: "c8d7f1a0-2e43-4de6-9f5e-493113431cfc"). InnerVolumeSpecName "kube-api-access-zjrjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.049014 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c8d7f1a0-2e43-4de6-9f5e-493113431cfc" (UID: "c8d7f1a0-2e43-4de6-9f5e-493113431cfc"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.144703 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.144736 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjrjf\" (UniqueName: \"kubernetes.io/projected/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-kube-api-access-zjrjf\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.144750 4863 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8d7f1a0-2e43-4de6-9f5e-493113431cfc-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.149847 4863 generic.go:334] "Generic (PLEG): container finished" podID="c8d7f1a0-2e43-4de6-9f5e-493113431cfc" containerID="5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42" exitCode=0 Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.149899 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" event={"ID":"c8d7f1a0-2e43-4de6-9f5e-493113431cfc","Type":"ContainerDied","Data":"5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42"} Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.149935 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" event={"ID":"c8d7f1a0-2e43-4de6-9f5e-493113431cfc","Type":"ContainerDied","Data":"544492d49aec69fbdf990f80cc9f901d68646c761260c764059e2a23b7488211"} Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.149947 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.149958 4863 scope.go:117] "RemoveContainer" containerID="5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.181628 4863 scope.go:117] "RemoveContainer" containerID="5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42" Dec 05 06:52:28 crc kubenswrapper[4863]: E1205 06:52:28.182342 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42\": container with ID starting with 5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42 not found: ID does not exist" containerID="5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.182382 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42"} err="failed to get container status \"5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42\": rpc error: code = NotFound desc = could not find container \"5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42\": container with ID starting with 5e2bb36a00d68e579730a8cb46d992c849481bfda0ad11535d7efed220e79d42 not found: ID does not exist" Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.198221 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg"] Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.217801 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6b7f7d5cdc-sn4qg"] Dec 05 06:52:28 crc kubenswrapper[4863]: I1205 06:52:28.614235 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8d7f1a0-2e43-4de6-9f5e-493113431cfc" path="/var/lib/kubelet/pods/c8d7f1a0-2e43-4de6-9f5e-493113431cfc/volumes" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.146311 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-79c48665d7-b5t8k"] Dec 05 06:52:29 crc kubenswrapper[4863]: E1205 06:52:29.146692 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8d7f1a0-2e43-4de6-9f5e-493113431cfc" containerName="controller-manager" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.146715 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8d7f1a0-2e43-4de6-9f5e-493113431cfc" containerName="controller-manager" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.146889 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8d7f1a0-2e43-4de6-9f5e-493113431cfc" containerName="controller-manager" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.147689 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.151934 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.153039 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.153123 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.154262 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.154569 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.157851 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-client-ca\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.157934 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-config\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.158057 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpxl9\" (UniqueName: \"kubernetes.io/projected/d3f56638-72f7-416b-b3fe-17a0f8e50761-kube-api-access-xpxl9\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.158139 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-proxy-ca-bundles\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.158216 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3f56638-72f7-416b-b3fe-17a0f8e50761-serving-cert\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.160817 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.168611 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-79c48665d7-b5t8k"] Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.179445 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.259345 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-client-ca\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.259847 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-config\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.260125 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpxl9\" (UniqueName: \"kubernetes.io/projected/d3f56638-72f7-416b-b3fe-17a0f8e50761-kube-api-access-xpxl9\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.260299 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-proxy-ca-bundles\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.260562 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3f56638-72f7-416b-b3fe-17a0f8e50761-serving-cert\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.262078 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-client-ca\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.262407 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-config\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.263551 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3f56638-72f7-416b-b3fe-17a0f8e50761-proxy-ca-bundles\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " 
pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.267092 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3f56638-72f7-416b-b3fe-17a0f8e50761-serving-cert\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.290174 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpxl9\" (UniqueName: \"kubernetes.io/projected/d3f56638-72f7-416b-b3fe-17a0f8e50761-kube-api-access-xpxl9\") pod \"controller-manager-79c48665d7-b5t8k\" (UID: \"d3f56638-72f7-416b-b3fe-17a0f8e50761\") " pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.476148 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:29 crc kubenswrapper[4863]: I1205 06:52:29.758675 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79c48665d7-b5t8k"] Dec 05 06:52:29 crc kubenswrapper[4863]: W1205 06:52:29.770336 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3f56638_72f7_416b_b3fe_17a0f8e50761.slice/crio-033c52a8ce422e4268d0b9bf2c6e8a2d478a8cb87ba0b80d938f1413970a54b8 WatchSource:0}: Error finding container 033c52a8ce422e4268d0b9bf2c6e8a2d478a8cb87ba0b80d938f1413970a54b8: Status 404 returned error can't find the container with id 033c52a8ce422e4268d0b9bf2c6e8a2d478a8cb87ba0b80d938f1413970a54b8 Dec 05 06:52:30 crc kubenswrapper[4863]: I1205 06:52:30.169422 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" event={"ID":"d3f56638-72f7-416b-b3fe-17a0f8e50761","Type":"ContainerStarted","Data":"6dbf0f195d19cfed29ef2e43786dc4166fda6e08203175d814ea1c75199fa08b"} Dec 05 06:52:30 crc kubenswrapper[4863]: I1205 06:52:30.169760 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:30 crc kubenswrapper[4863]: I1205 06:52:30.169775 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" event={"ID":"d3f56638-72f7-416b-b3fe-17a0f8e50761","Type":"ContainerStarted","Data":"033c52a8ce422e4268d0b9bf2c6e8a2d478a8cb87ba0b80d938f1413970a54b8"} Dec 05 06:52:30 crc kubenswrapper[4863]: I1205 06:52:30.177808 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" Dec 05 06:52:30 crc kubenswrapper[4863]: I1205 06:52:30.202344 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-79c48665d7-b5t8k" podStartSLOduration=3.202315687 podStartE2EDuration="3.202315687s" podCreationTimestamp="2025-12-05 06:52:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:52:30.198257962 +0000 UTC m=+377.924255062" watchObservedRunningTime="2025-12-05 06:52:30.202315687 +0000 UTC m=+377.928312767" Dec 05 06:52:38 crc 
kubenswrapper[4863]: I1205 06:52:38.464780 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:52:38 crc kubenswrapper[4863]: I1205 06:52:38.465588 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.407150 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-drxcz"] Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.408206 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-drxcz" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="registry-server" containerID="cri-o://7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3" gracePeriod=30 Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.419357 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cpr9z"] Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.419681 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cpr9z" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="registry-server" containerID="cri-o://645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136" gracePeriod=30 Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.432064 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c878b"] Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.432297 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" podUID="50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" containerName="marketplace-operator" containerID="cri-o://2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff" gracePeriod=30 Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.458243 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7jp8g"] Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.458629 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7jp8g" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="registry-server" containerID="cri-o://124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7" gracePeriod=30 Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.465321 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5vcrz"] Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.465581 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5vcrz" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="registry-server" containerID="cri-o://4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340" gracePeriod=30 Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.469870 4863 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8b74q"] Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.470698 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.478518 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8b74q"] Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.579159 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.579648 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mdsk\" (UniqueName: \"kubernetes.io/projected/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-kube-api-access-4mdsk\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.579697 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.680826 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.680940 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mdsk\" (UniqueName: \"kubernetes.io/projected/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-kube-api-access-4mdsk\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.681004 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.682185 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.687368 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.700232 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mdsk\" (UniqueName: \"kubernetes.io/projected/bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7-kube-api-access-4mdsk\") pod \"marketplace-operator-79b997595-8b74q\" (UID: \"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7\") " pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.893300 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:50 crc kubenswrapper[4863]: I1205 06:52:50.898286 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.092879 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j9sb\" (UniqueName: \"kubernetes.io/projected/cb921038-e831-47ea-af78-e21e51079af7-kube-api-access-7j9sb\") pod \"cb921038-e831-47ea-af78-e21e51079af7\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.093262 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-utilities\") pod \"cb921038-e831-47ea-af78-e21e51079af7\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.096103 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-catalog-content\") pod \"cb921038-e831-47ea-af78-e21e51079af7\" (UID: \"cb921038-e831-47ea-af78-e21e51079af7\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.096618 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-utilities" (OuterVolumeSpecName: "utilities") pod "cb921038-e831-47ea-af78-e21e51079af7" (UID: "cb921038-e831-47ea-af78-e21e51079af7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.098626 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb921038-e831-47ea-af78-e21e51079af7-kube-api-access-7j9sb" (OuterVolumeSpecName: "kube-api-access-7j9sb") pod "cb921038-e831-47ea-af78-e21e51079af7" (UID: "cb921038-e831-47ea-af78-e21e51079af7"). InnerVolumeSpecName "kube-api-access-7j9sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.100016 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.100402 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.137570 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.146025 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.154284 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb921038-e831-47ea-af78-e21e51079af7" (UID: "cb921038-e831-47ea-af78-e21e51079af7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198483 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k578z\" (UniqueName: \"kubernetes.io/projected/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-kube-api-access-k578z\") pod \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198568 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-utilities\") pod \"9b9447f4-590b-4d1d-8105-bfad4f700daa\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198601 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-catalog-content\") pod \"1b94ae89-c171-471a-bd34-12ed57a752b8\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198683 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgr7v\" (UniqueName: \"kubernetes.io/projected/632b1594-6205-4741-a50d-fdd157e0f47e-kube-api-access-lgr7v\") pod \"632b1594-6205-4741-a50d-fdd157e0f47e\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198715 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-operator-metrics\") pod \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198788 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-utilities\") pod \"632b1594-6205-4741-a50d-fdd157e0f47e\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198818 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-catalog-content\") pod 
\"632b1594-6205-4741-a50d-fdd157e0f47e\" (UID: \"632b1594-6205-4741-a50d-fdd157e0f47e\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198850 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txwqv\" (UniqueName: \"kubernetes.io/projected/1b94ae89-c171-471a-bd34-12ed57a752b8-kube-api-access-txwqv\") pod \"1b94ae89-c171-471a-bd34-12ed57a752b8\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198890 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-utilities\") pod \"1b94ae89-c171-471a-bd34-12ed57a752b8\" (UID: \"1b94ae89-c171-471a-bd34-12ed57a752b8\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198959 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-catalog-content\") pod \"9b9447f4-590b-4d1d-8105-bfad4f700daa\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.198986 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-trusted-ca\") pod \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\" (UID: \"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.199022 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgkwc\" (UniqueName: \"kubernetes.io/projected/9b9447f4-590b-4d1d-8105-bfad4f700daa-kube-api-access-sgkwc\") pod \"9b9447f4-590b-4d1d-8105-bfad4f700daa\" (UID: \"9b9447f4-590b-4d1d-8105-bfad4f700daa\") " Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.199388 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j9sb\" (UniqueName: \"kubernetes.io/projected/cb921038-e831-47ea-af78-e21e51079af7-kube-api-access-7j9sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.199415 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.199428 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb921038-e831-47ea-af78-e21e51079af7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.200587 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-utilities" (OuterVolumeSpecName: "utilities") pod "632b1594-6205-4741-a50d-fdd157e0f47e" (UID: "632b1594-6205-4741-a50d-fdd157e0f47e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.200715 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-utilities" (OuterVolumeSpecName: "utilities") pod "1b94ae89-c171-471a-bd34-12ed57a752b8" (UID: "1b94ae89-c171-471a-bd34-12ed57a752b8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.202084 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b9447f4-590b-4d1d-8105-bfad4f700daa-kube-api-access-sgkwc" (OuterVolumeSpecName: "kube-api-access-sgkwc") pod "9b9447f4-590b-4d1d-8105-bfad4f700daa" (UID: "9b9447f4-590b-4d1d-8105-bfad4f700daa"). InnerVolumeSpecName "kube-api-access-sgkwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.202783 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" (UID: "50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.203206 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b94ae89-c171-471a-bd34-12ed57a752b8-kube-api-access-txwqv" (OuterVolumeSpecName: "kube-api-access-txwqv") pod "1b94ae89-c171-471a-bd34-12ed57a752b8" (UID: "1b94ae89-c171-471a-bd34-12ed57a752b8"). InnerVolumeSpecName "kube-api-access-txwqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.203889 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-utilities" (OuterVolumeSpecName: "utilities") pod "9b9447f4-590b-4d1d-8105-bfad4f700daa" (UID: "9b9447f4-590b-4d1d-8105-bfad4f700daa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.204232 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-kube-api-access-k578z" (OuterVolumeSpecName: "kube-api-access-k578z") pod "50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" (UID: "50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4"). InnerVolumeSpecName "kube-api-access-k578z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.206531 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" (UID: "50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.217819 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/632b1594-6205-4741-a50d-fdd157e0f47e-kube-api-access-lgr7v" (OuterVolumeSpecName: "kube-api-access-lgr7v") pod "632b1594-6205-4741-a50d-fdd157e0f47e" (UID: "632b1594-6205-4741-a50d-fdd157e0f47e"). InnerVolumeSpecName "kube-api-access-lgr7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.243388 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b94ae89-c171-471a-bd34-12ed57a752b8" (UID: "1b94ae89-c171-471a-bd34-12ed57a752b8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.260190 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b9447f4-590b-4d1d-8105-bfad4f700daa" (UID: "9b9447f4-590b-4d1d-8105-bfad4f700daa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300390 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300431 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txwqv\" (UniqueName: \"kubernetes.io/projected/1b94ae89-c171-471a-bd34-12ed57a752b8-kube-api-access-txwqv\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300443 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300453 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300464 4863 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300490 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgkwc\" (UniqueName: \"kubernetes.io/projected/9b9447f4-590b-4d1d-8105-bfad4f700daa-kube-api-access-sgkwc\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300499 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k578z\" (UniqueName: \"kubernetes.io/projected/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-kube-api-access-k578z\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300507 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b9447f4-590b-4d1d-8105-bfad4f700daa-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300519 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b94ae89-c171-471a-bd34-12ed57a752b8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300528 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgr7v\" (UniqueName: 
\"kubernetes.io/projected/632b1594-6205-4741-a50d-fdd157e0f47e-kube-api-access-lgr7v\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.300536 4863 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.311804 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "632b1594-6205-4741-a50d-fdd157e0f47e" (UID: "632b1594-6205-4741-a50d-fdd157e0f47e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.394580 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8b74q"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.401296 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/632b1594-6205-4741-a50d-fdd157e0f47e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.482127 4863 generic.go:334] "Generic (PLEG): container finished" podID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerID="645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136" exitCode=0 Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.482189 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpr9z" event={"ID":"9b9447f4-590b-4d1d-8105-bfad4f700daa","Type":"ContainerDied","Data":"645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.482217 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpr9z" event={"ID":"9b9447f4-590b-4d1d-8105-bfad4f700daa","Type":"ContainerDied","Data":"9c07bd9ca720ca8e97d26f40eebda2ce095eb74cea6d8e914dfe9db79b1cf03f"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.482236 4863 scope.go:117] "RemoveContainer" containerID="645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.482234 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cpr9z" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.487060 4863 generic.go:334] "Generic (PLEG): container finished" podID="632b1594-6205-4741-a50d-fdd157e0f47e" containerID="4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340" exitCode=0 Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.487182 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5vcrz" event={"ID":"632b1594-6205-4741-a50d-fdd157e0f47e","Type":"ContainerDied","Data":"4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.487227 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5vcrz" event={"ID":"632b1594-6205-4741-a50d-fdd157e0f47e","Type":"ContainerDied","Data":"24e87803e64399f488a995aa021b03708c876f3900abec69561d78cb7d1d64c2"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.487899 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5vcrz" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.490775 4863 generic.go:334] "Generic (PLEG): container finished" podID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerID="124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7" exitCode=0 Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.490882 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7jp8g" event={"ID":"1b94ae89-c171-471a-bd34-12ed57a752b8","Type":"ContainerDied","Data":"124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.490936 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7jp8g" event={"ID":"1b94ae89-c171-471a-bd34-12ed57a752b8","Type":"ContainerDied","Data":"88ac71343d29bbe6d9029676374b00e7f49d250a21b3c0aff5c763235c92cfd4"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.492766 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7jp8g" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.494287 4863 generic.go:334] "Generic (PLEG): container finished" podID="cb921038-e831-47ea-af78-e21e51079af7" containerID="7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3" exitCode=0 Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.494531 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drxcz" event={"ID":"cb921038-e831-47ea-af78-e21e51079af7","Type":"ContainerDied","Data":"7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.494621 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drxcz" event={"ID":"cb921038-e831-47ea-af78-e21e51079af7","Type":"ContainerDied","Data":"f09a5d83ea9f3b5553a8e0b862ffd0a9d76ad912a8ace287ea8bbaaccb35b126"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.494768 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-drxcz" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.503507 4863 scope.go:117] "RemoveContainer" containerID="702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.509910 4863 generic.go:334] "Generic (PLEG): container finished" podID="50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" containerID="2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff" exitCode=0 Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.510029 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.510053 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" event={"ID":"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4","Type":"ContainerDied","Data":"2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.510892 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c878b" event={"ID":"50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4","Type":"ContainerDied","Data":"ca45814397adb0dce36731067cdd3effc5b439905ae38a64690a02121afb7c2d"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.514929 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" event={"ID":"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7","Type":"ContainerStarted","Data":"36c05f17a53f53ebe97a5d7ea3d5d18de882b34921e084268c106d56c8111299"} Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.536478 4863 scope.go:117] "RemoveContainer" containerID="122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.596367 4863 scope.go:117] "RemoveContainer" containerID="645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.596516 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cpr9z"] Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.596969 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136\": container with ID starting with 645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136 not found: ID does not exist" containerID="645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.597009 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136"} err="failed to get container status \"645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136\": rpc error: code = NotFound desc = could not find container \"645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136\": container with ID starting with 645599396d22d2a98cf528db2864a05070e571dc20e02576a527e8e0c7654136 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.597036 4863 scope.go:117] "RemoveContainer" containerID="702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.597491 
4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944\": container with ID starting with 702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944 not found: ID does not exist" containerID="702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.597518 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944"} err="failed to get container status \"702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944\": rpc error: code = NotFound desc = could not find container \"702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944\": container with ID starting with 702e02a4afa8efa920806e9dc94985f86ee4498fc29124edbf583df82c85c944 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.597533 4863 scope.go:117] "RemoveContainer" containerID="122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.597767 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726\": container with ID starting with 122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726 not found: ID does not exist" containerID="122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.598296 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726"} err="failed to get container status \"122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726\": rpc error: code = NotFound desc = could not find container \"122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726\": container with ID starting with 122ff407a79895f9c3f0a8d5b4d4fc7e37cbc74753d5499ab941c7273171b726 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.598318 4863 scope.go:117] "RemoveContainer" containerID="4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.604775 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cpr9z"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.619406 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7jp8g"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.631527 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7jp8g"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.633650 4863 scope.go:117] "RemoveContainer" containerID="67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.636141 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c878b"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.646015 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c878b"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.651316 4863 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5vcrz"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.657406 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5vcrz"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.660830 4863 scope.go:117] "RemoveContainer" containerID="0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.667883 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-drxcz"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.672369 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-drxcz"] Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.675587 4863 scope.go:117] "RemoveContainer" containerID="4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.675915 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340\": container with ID starting with 4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340 not found: ID does not exist" containerID="4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.675955 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340"} err="failed to get container status \"4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340\": rpc error: code = NotFound desc = could not find container \"4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340\": container with ID starting with 4e2471ddf3c4453042b30e5cb163b0aac951813a0304a83487bafe4ad94b6340 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.675989 4863 scope.go:117] "RemoveContainer" containerID="67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.676346 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b\": container with ID starting with 67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b not found: ID does not exist" containerID="67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.676371 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b"} err="failed to get container status \"67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b\": rpc error: code = NotFound desc = could not find container \"67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b\": container with ID starting with 67eea7d4e8807430a48605f1f2d0e9eb19800703c287d563797c7a6549552d2b not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.676396 4863 scope.go:117] "RemoveContainer" containerID="0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.676601 4863 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93\": container with ID starting with 0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93 not found: ID does not exist" containerID="0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.676626 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93"} err="failed to get container status \"0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93\": rpc error: code = NotFound desc = could not find container \"0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93\": container with ID starting with 0a2002f7f2307837ee38cdd6790ee14cfca7859a202007669785a66097c9bb93 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.676643 4863 scope.go:117] "RemoveContainer" containerID="124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.687340 4863 scope.go:117] "RemoveContainer" containerID="dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.701065 4863 scope.go:117] "RemoveContainer" containerID="ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.719234 4863 scope.go:117] "RemoveContainer" containerID="124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.719657 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7\": container with ID starting with 124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7 not found: ID does not exist" containerID="124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.719696 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7"} err="failed to get container status \"124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7\": rpc error: code = NotFound desc = could not find container \"124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7\": container with ID starting with 124ecdb3f3e755e43cf052ab715f11d4e6637e741e56554e63578b375339c0b7 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.719723 4863 scope.go:117] "RemoveContainer" containerID="dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.720595 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91\": container with ID starting with dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91 not found: ID does not exist" containerID="dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.720634 4863 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91"} err="failed to get container status \"dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91\": rpc error: code = NotFound desc = could not find container \"dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91\": container with ID starting with dbf8a1b57dc2a0084c9f30fd8c6dea246071d580667a6f40c73ca3e86d68dd91 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.720656 4863 scope.go:117] "RemoveContainer" containerID="ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.720953 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76\": container with ID starting with ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76 not found: ID does not exist" containerID="ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.720993 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76"} err="failed to get container status \"ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76\": rpc error: code = NotFound desc = could not find container \"ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76\": container with ID starting with ced31616c6a085a5be3d75d9a07a33dd9249d9979400f84e3f17f84899599e76 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.721018 4863 scope.go:117] "RemoveContainer" containerID="7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.733149 4863 scope.go:117] "RemoveContainer" containerID="eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.746839 4863 scope.go:117] "RemoveContainer" containerID="a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.760628 4863 scope.go:117] "RemoveContainer" containerID="7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.760983 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3\": container with ID starting with 7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3 not found: ID does not exist" containerID="7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.761022 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3"} err="failed to get container status \"7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3\": rpc error: code = NotFound desc = could not find container \"7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3\": container with ID starting with 7a92b294183d833f3083e1a7c98707a7bc7222c961e12afe3ef1759f6225b5a3 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.761051 4863 
scope.go:117] "RemoveContainer" containerID="eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.761312 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda\": container with ID starting with eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda not found: ID does not exist" containerID="eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.761343 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda"} err="failed to get container status \"eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda\": rpc error: code = NotFound desc = could not find container \"eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda\": container with ID starting with eba8b94757e546879b45f8e06c98d5aac20d4e73f0c7b20d3db6f06f8cbdeeda not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.761367 4863 scope.go:117] "RemoveContainer" containerID="a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.761602 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3\": container with ID starting with a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3 not found: ID does not exist" containerID="a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.761633 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3"} err="failed to get container status \"a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3\": rpc error: code = NotFound desc = could not find container \"a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3\": container with ID starting with a0db9acb09a95ffe2008f0c8b44df42c5f927ba05347e2381e0217dac69585b3 not found: ID does not exist" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.761650 4863 scope.go:117] "RemoveContainer" containerID="2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.771779 4863 scope.go:117] "RemoveContainer" containerID="2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff" Dec 05 06:52:51 crc kubenswrapper[4863]: E1205 06:52:51.772149 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff\": container with ID starting with 2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff not found: ID does not exist" containerID="2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff" Dec 05 06:52:51 crc kubenswrapper[4863]: I1205 06:52:51.772188 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff"} err="failed to get container status 
\"2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff\": rpc error: code = NotFound desc = could not find container \"2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff\": container with ID starting with 2f8f5ab4c9f1985978f5337ce644315e2809f877b919d5db008e83ee47e95cff not found: ID does not exist" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.525189 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" event={"ID":"bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7","Type":"ContainerStarted","Data":"3a27cd24ed35e9d42adb5aa30c86fbf2691ab8b114d2f98828b0578a09946ec2"} Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.525454 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.529453 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.546129 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8b74q" podStartSLOduration=2.546113473 podStartE2EDuration="2.546113473s" podCreationTimestamp="2025-12-05 06:52:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:52:52.545519187 +0000 UTC m=+400.271516247" watchObservedRunningTime="2025-12-05 06:52:52.546113473 +0000 UTC m=+400.272110513" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.617281 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" path="/var/lib/kubelet/pods/1b94ae89-c171-471a-bd34-12ed57a752b8/volumes" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.618268 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" path="/var/lib/kubelet/pods/50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4/volumes" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.618731 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" path="/var/lib/kubelet/pods/632b1594-6205-4741-a50d-fdd157e0f47e/volumes" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.619728 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" path="/var/lib/kubelet/pods/9b9447f4-590b-4d1d-8105-bfad4f700daa/volumes" Dec 05 06:52:52 crc kubenswrapper[4863]: I1205 06:52:52.620291 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb921038-e831-47ea-af78-e21e51079af7" path="/var/lib/kubelet/pods/cb921038-e831-47ea-af78-e21e51079af7/volumes" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.422755 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dq86d"] Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423123 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" containerName="marketplace-operator" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423640 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" containerName="marketplace-operator" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423674 4863 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="extract-utilities" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423690 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="extract-utilities" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423717 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423734 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423752 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423766 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423786 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423801 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423831 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423845 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423863 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423878 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423896 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423910 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423933 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="extract-utilities" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423951 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="extract-utilities" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.423973 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="extract-utilities" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.423989 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="extract-utilities" Dec 05 06:52:53 crc 
kubenswrapper[4863]: E1205 06:52:53.424018 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424034 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.424069 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424086 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="extract-content" Dec 05 06:52:53 crc kubenswrapper[4863]: E1205 06:52:53.424110 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="extract-utilities" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424127 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="extract-utilities" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424330 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="50db14aa-39fc-45f0-bccf-2fbe8e5e1ed4" containerName="marketplace-operator" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424368 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b9447f4-590b-4d1d-8105-bfad4f700daa" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424392 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb921038-e831-47ea-af78-e21e51079af7" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424412 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="632b1594-6205-4741-a50d-fdd157e0f47e" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.424437 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b94ae89-c171-471a-bd34-12ed57a752b8" containerName="registry-server" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.425659 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.429044 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.444230 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dq86d"] Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.531102 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj2vd\" (UniqueName: \"kubernetes.io/projected/19f2d47e-7dc6-494d-9bfc-b77ba012844c-kube-api-access-zj2vd\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.531209 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f2d47e-7dc6-494d-9bfc-b77ba012844c-catalog-content\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.531282 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f2d47e-7dc6-494d-9bfc-b77ba012844c-utilities\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.632275 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj2vd\" (UniqueName: \"kubernetes.io/projected/19f2d47e-7dc6-494d-9bfc-b77ba012844c-kube-api-access-zj2vd\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.632649 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f2d47e-7dc6-494d-9bfc-b77ba012844c-catalog-content\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.632748 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f2d47e-7dc6-494d-9bfc-b77ba012844c-utilities\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.633300 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f2d47e-7dc6-494d-9bfc-b77ba012844c-catalog-content\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.633506 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f2d47e-7dc6-494d-9bfc-b77ba012844c-utilities\") pod \"redhat-marketplace-dq86d\" (UID: 
\"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.656197 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj2vd\" (UniqueName: \"kubernetes.io/projected/19f2d47e-7dc6-494d-9bfc-b77ba012844c-kube-api-access-zj2vd\") pod \"redhat-marketplace-dq86d\" (UID: \"19f2d47e-7dc6-494d-9bfc-b77ba012844c\") " pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:53 crc kubenswrapper[4863]: I1205 06:52:53.755842 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.016352 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gbnbn"] Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.017885 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.020362 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.032539 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gbnbn"] Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.138821 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzs7b\" (UniqueName: \"kubernetes.io/projected/5102b056-af0a-435a-9d9a-c711ec903c32-kube-api-access-vzs7b\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.139034 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5102b056-af0a-435a-9d9a-c711ec903c32-catalog-content\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.139277 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5102b056-af0a-435a-9d9a-c711ec903c32-utilities\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.169935 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dq86d"] Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.240132 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzs7b\" (UniqueName: \"kubernetes.io/projected/5102b056-af0a-435a-9d9a-c711ec903c32-kube-api-access-vzs7b\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.240181 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5102b056-af0a-435a-9d9a-c711ec903c32-catalog-content\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " 
pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.240219 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5102b056-af0a-435a-9d9a-c711ec903c32-utilities\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.240673 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5102b056-af0a-435a-9d9a-c711ec903c32-utilities\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.240737 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5102b056-af0a-435a-9d9a-c711ec903c32-catalog-content\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.260111 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzs7b\" (UniqueName: \"kubernetes.io/projected/5102b056-af0a-435a-9d9a-c711ec903c32-kube-api-access-vzs7b\") pod \"redhat-operators-gbnbn\" (UID: \"5102b056-af0a-435a-9d9a-c711ec903c32\") " pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.339231 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.542333 4863 generic.go:334] "Generic (PLEG): container finished" podID="19f2d47e-7dc6-494d-9bfc-b77ba012844c" containerID="5ef6ca9ff1841409dc20162e06ea3e1da3dfd2ce4bb2594703a1e9601525becf" exitCode=0 Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.542500 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dq86d" event={"ID":"19f2d47e-7dc6-494d-9bfc-b77ba012844c","Type":"ContainerDied","Data":"5ef6ca9ff1841409dc20162e06ea3e1da3dfd2ce4bb2594703a1e9601525becf"} Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.542783 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dq86d" event={"ID":"19f2d47e-7dc6-494d-9bfc-b77ba012844c","Type":"ContainerStarted","Data":"d93e2c710d6bbc69f9e9bad05639af8810693c651b4748da0e8498b33ef77cae"} Dec 05 06:52:54 crc kubenswrapper[4863]: I1205 06:52:54.786394 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gbnbn"] Dec 05 06:52:54 crc kubenswrapper[4863]: W1205 06:52:54.792025 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5102b056_af0a_435a_9d9a_c711ec903c32.slice/crio-98cee2a1eee75101a7001352617d50157618070f4953cb385fc44b0ce08be405 WatchSource:0}: Error finding container 98cee2a1eee75101a7001352617d50157618070f4953cb385fc44b0ce08be405: Status 404 returned error can't find the container with id 98cee2a1eee75101a7001352617d50157618070f4953cb385fc44b0ce08be405 Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.558408 4863 generic.go:334] "Generic (PLEG): container finished" podID="19f2d47e-7dc6-494d-9bfc-b77ba012844c" 
containerID="713f628029e40150eacb3ab41bb794adcb4a1b0b498b2af4877469a8dd33522d" exitCode=0 Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.558491 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dq86d" event={"ID":"19f2d47e-7dc6-494d-9bfc-b77ba012844c","Type":"ContainerDied","Data":"713f628029e40150eacb3ab41bb794adcb4a1b0b498b2af4877469a8dd33522d"} Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.561318 4863 generic.go:334] "Generic (PLEG): container finished" podID="5102b056-af0a-435a-9d9a-c711ec903c32" containerID="e0796d0f0714a7e282a0ddbe15a88a92f50773cdcbbea3552467ccb8a9da21f9" exitCode=0 Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.561341 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gbnbn" event={"ID":"5102b056-af0a-435a-9d9a-c711ec903c32","Type":"ContainerDied","Data":"e0796d0f0714a7e282a0ddbe15a88a92f50773cdcbbea3552467ccb8a9da21f9"} Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.561359 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gbnbn" event={"ID":"5102b056-af0a-435a-9d9a-c711ec903c32","Type":"ContainerStarted","Data":"98cee2a1eee75101a7001352617d50157618070f4953cb385fc44b0ce08be405"} Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.822236 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9p9x6"] Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.824367 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.825980 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.828746 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9p9x6"] Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.859012 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bb7b\" (UniqueName: \"kubernetes.io/projected/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-kube-api-access-4bb7b\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.859085 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-utilities\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.859104 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-catalog-content\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.960635 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bb7b\" (UniqueName: \"kubernetes.io/projected/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-kube-api-access-4bb7b\") 
pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.960724 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-utilities\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.960749 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-catalog-content\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.961241 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-catalog-content\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.961313 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-utilities\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:55 crc kubenswrapper[4863]: I1205 06:52:55.985058 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bb7b\" (UniqueName: \"kubernetes.io/projected/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-kube-api-access-4bb7b\") pod \"community-operators-9p9x6\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.154415 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.429242 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qpw49"] Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.431563 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.434036 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qpw49"] Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.435068 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.468037 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-catalog-content\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.468138 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcz5x\" (UniqueName: \"kubernetes.io/projected/f94c0747-2c1f-40fd-941d-e714db6709d5-kube-api-access-dcz5x\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.468188 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-utilities\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.568918 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcz5x\" (UniqueName: \"kubernetes.io/projected/f94c0747-2c1f-40fd-941d-e714db6709d5-kube-api-access-dcz5x\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.568975 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-utilities\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.569025 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-catalog-content\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.569511 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-utilities\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.569526 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-catalog-content\") pod \"certified-operators-qpw49\" (UID: 
\"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.570173 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gbnbn" event={"ID":"5102b056-af0a-435a-9d9a-c711ec903c32","Type":"ContainerStarted","Data":"4fcc8fb165f1bea614e96ad4aa67f9aaa2a573d2f3e35a9b986ab18ac1afb2f9"} Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.573650 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dq86d" event={"ID":"19f2d47e-7dc6-494d-9bfc-b77ba012844c","Type":"ContainerStarted","Data":"3f14aab23f39151b891432ecd81ffacc7d1fdd4d863f89bcd7544237599aefac"} Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.589533 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9p9x6"] Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.598541 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcz5x\" (UniqueName: \"kubernetes.io/projected/f94c0747-2c1f-40fd-941d-e714db6709d5-kube-api-access-dcz5x\") pod \"certified-operators-qpw49\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.605806 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dq86d" podStartSLOduration=2.186619409 podStartE2EDuration="3.605789747s" podCreationTimestamp="2025-12-05 06:52:53 +0000 UTC" firstStartedPulling="2025-12-05 06:52:54.545705564 +0000 UTC m=+402.271702604" lastFinishedPulling="2025-12-05 06:52:55.964875872 +0000 UTC m=+403.690872942" observedRunningTime="2025-12-05 06:52:56.604245637 +0000 UTC m=+404.330242677" watchObservedRunningTime="2025-12-05 06:52:56.605789747 +0000 UTC m=+404.331786787" Dec 05 06:52:56 crc kubenswrapper[4863]: I1205 06:52:56.758427 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.187263 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qpw49"] Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.582414 4863 generic.go:334] "Generic (PLEG): container finished" podID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerID="5adc91e3de2512051eda7786a20d97b7bd28bc5a3754f3282d3cc5889a8f2c94" exitCode=0 Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.582712 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpw49" event={"ID":"f94c0747-2c1f-40fd-941d-e714db6709d5","Type":"ContainerDied","Data":"5adc91e3de2512051eda7786a20d97b7bd28bc5a3754f3282d3cc5889a8f2c94"} Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.582743 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpw49" event={"ID":"f94c0747-2c1f-40fd-941d-e714db6709d5","Type":"ContainerStarted","Data":"501784ac6206f655d2f7c746783da8c0b071560716624c8b7c37beb95d4f3b5d"} Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.587000 4863 generic.go:334] "Generic (PLEG): container finished" podID="5102b056-af0a-435a-9d9a-c711ec903c32" containerID="4fcc8fb165f1bea614e96ad4aa67f9aaa2a573d2f3e35a9b986ab18ac1afb2f9" exitCode=0 Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.587186 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gbnbn" event={"ID":"5102b056-af0a-435a-9d9a-c711ec903c32","Type":"ContainerDied","Data":"4fcc8fb165f1bea614e96ad4aa67f9aaa2a573d2f3e35a9b986ab18ac1afb2f9"} Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.590553 4863 generic.go:334] "Generic (PLEG): container finished" podID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerID="4a5d4262e13038176f3a0d29e7ad6deaede03014ee79e490312bd26825517428" exitCode=0 Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.590699 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9p9x6" event={"ID":"9b8f6f97-ba75-4706-aaef-cbaf00e0b338","Type":"ContainerDied","Data":"4a5d4262e13038176f3a0d29e7ad6deaede03014ee79e490312bd26825517428"} Dec 05 06:52:57 crc kubenswrapper[4863]: I1205 06:52:57.590739 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9p9x6" event={"ID":"9b8f6f97-ba75-4706-aaef-cbaf00e0b338","Type":"ContainerStarted","Data":"be5329e8720f357007b80b61d22a578d02dcb1ebab26a22366ca3d7dcd5463a0"} Dec 05 06:52:58 crc kubenswrapper[4863]: I1205 06:52:58.598590 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpw49" event={"ID":"f94c0747-2c1f-40fd-941d-e714db6709d5","Type":"ContainerStarted","Data":"c26886ad130e068fa17bf5bce03ebce2bb03a2b7051ebc88cad5a5ac59126e58"} Dec 05 06:52:58 crc kubenswrapper[4863]: I1205 06:52:58.612424 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gbnbn" event={"ID":"5102b056-af0a-435a-9d9a-c711ec903c32","Type":"ContainerStarted","Data":"b691c2ec8830cc276f50f352423c367cff09654ff9e2309b881ea20975265ac4"} Dec 05 06:52:58 crc kubenswrapper[4863]: I1205 06:52:58.612513 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9p9x6" 
event={"ID":"9b8f6f97-ba75-4706-aaef-cbaf00e0b338","Type":"ContainerStarted","Data":"827ab0b477d92a39b9d2d819a8535b0efa44fe45bb27c198d734bcbffe178ad0"} Dec 05 06:52:58 crc kubenswrapper[4863]: I1205 06:52:58.679419 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gbnbn" podStartSLOduration=2.22574399 podStartE2EDuration="4.679401944s" podCreationTimestamp="2025-12-05 06:52:54 +0000 UTC" firstStartedPulling="2025-12-05 06:52:55.564064957 +0000 UTC m=+403.290061997" lastFinishedPulling="2025-12-05 06:52:58.017722921 +0000 UTC m=+405.743719951" observedRunningTime="2025-12-05 06:52:58.675732351 +0000 UTC m=+406.401729421" watchObservedRunningTime="2025-12-05 06:52:58.679401944 +0000 UTC m=+406.405398984" Dec 05 06:52:59 crc kubenswrapper[4863]: I1205 06:52:59.616121 4863 generic.go:334] "Generic (PLEG): container finished" podID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerID="827ab0b477d92a39b9d2d819a8535b0efa44fe45bb27c198d734bcbffe178ad0" exitCode=0 Dec 05 06:52:59 crc kubenswrapper[4863]: I1205 06:52:59.616199 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9p9x6" event={"ID":"9b8f6f97-ba75-4706-aaef-cbaf00e0b338","Type":"ContainerDied","Data":"827ab0b477d92a39b9d2d819a8535b0efa44fe45bb27c198d734bcbffe178ad0"} Dec 05 06:52:59 crc kubenswrapper[4863]: I1205 06:52:59.618232 4863 generic.go:334] "Generic (PLEG): container finished" podID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerID="c26886ad130e068fa17bf5bce03ebce2bb03a2b7051ebc88cad5a5ac59126e58" exitCode=0 Dec 05 06:52:59 crc kubenswrapper[4863]: I1205 06:52:59.619184 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpw49" event={"ID":"f94c0747-2c1f-40fd-941d-e714db6709d5","Type":"ContainerDied","Data":"c26886ad130e068fa17bf5bce03ebce2bb03a2b7051ebc88cad5a5ac59126e58"} Dec 05 06:53:01 crc kubenswrapper[4863]: I1205 06:53:01.631066 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpw49" event={"ID":"f94c0747-2c1f-40fd-941d-e714db6709d5","Type":"ContainerStarted","Data":"39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028"} Dec 05 06:53:01 crc kubenswrapper[4863]: I1205 06:53:01.634177 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9p9x6" event={"ID":"9b8f6f97-ba75-4706-aaef-cbaf00e0b338","Type":"ContainerStarted","Data":"41a188b632d2ee10101dc5ded6e8c77b9192b92844cadb48a8eb89e24d709486"} Dec 05 06:53:01 crc kubenswrapper[4863]: I1205 06:53:01.652879 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qpw49" podStartSLOduration=2.771381249 podStartE2EDuration="5.652860936s" podCreationTimestamp="2025-12-05 06:52:56 +0000 UTC" firstStartedPulling="2025-12-05 06:52:57.58526494 +0000 UTC m=+405.311261990" lastFinishedPulling="2025-12-05 06:53:00.466744637 +0000 UTC m=+408.192741677" observedRunningTime="2025-12-05 06:53:01.651271696 +0000 UTC m=+409.377268746" watchObservedRunningTime="2025-12-05 06:53:01.652860936 +0000 UTC m=+409.378857976" Dec 05 06:53:01 crc kubenswrapper[4863]: I1205 06:53:01.676644 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9p9x6" podStartSLOduration=3.881533977 podStartE2EDuration="6.676623852s" podCreationTimestamp="2025-12-05 06:52:55 +0000 UTC" firstStartedPulling="2025-12-05 
06:52:57.592563356 +0000 UTC m=+405.318560396" lastFinishedPulling="2025-12-05 06:53:00.387653231 +0000 UTC m=+408.113650271" observedRunningTime="2025-12-05 06:53:01.672031604 +0000 UTC m=+409.398028644" watchObservedRunningTime="2025-12-05 06:53:01.676623852 +0000 UTC m=+409.402620892" Dec 05 06:53:03 crc kubenswrapper[4863]: I1205 06:53:03.757629 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:53:03 crc kubenswrapper[4863]: I1205 06:53:03.757978 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:53:03 crc kubenswrapper[4863]: I1205 06:53:03.827007 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:53:04 crc kubenswrapper[4863]: I1205 06:53:04.339794 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:53:04 crc kubenswrapper[4863]: I1205 06:53:04.340121 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:53:04 crc kubenswrapper[4863]: I1205 06:53:04.396359 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:53:04 crc kubenswrapper[4863]: I1205 06:53:04.690381 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gbnbn" Dec 05 06:53:04 crc kubenswrapper[4863]: I1205 06:53:04.713601 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dq86d" Dec 05 06:53:06 crc kubenswrapper[4863]: I1205 06:53:06.155103 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:53:06 crc kubenswrapper[4863]: I1205 06:53:06.155196 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:53:06 crc kubenswrapper[4863]: I1205 06:53:06.193892 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:53:06 crc kubenswrapper[4863]: I1205 06:53:06.710432 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 06:53:06 crc kubenswrapper[4863]: I1205 06:53:06.759110 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:53:06 crc kubenswrapper[4863]: I1205 06:53:06.759249 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:53:06 crc kubenswrapper[4863]: I1205 06:53:06.793970 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:53:07 crc kubenswrapper[4863]: I1205 06:53:07.718090 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 06:53:08 crc kubenswrapper[4863]: I1205 06:53:08.463851 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:53:08 crc kubenswrapper[4863]: I1205 06:53:08.463915 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:53:08 crc kubenswrapper[4863]: I1205 06:53:08.463957 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:53:08 crc kubenswrapper[4863]: I1205 06:53:08.464598 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9e55052d654a4e9837e8299382b6eba9dfb2f4bbe65e2faea6b5912ba55582f2"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:53:08 crc kubenswrapper[4863]: I1205 06:53:08.464682 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://9e55052d654a4e9837e8299382b6eba9dfb2f4bbe65e2faea6b5912ba55582f2" gracePeriod=600 Dec 05 06:53:10 crc kubenswrapper[4863]: I1205 06:53:10.682660 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="9e55052d654a4e9837e8299382b6eba9dfb2f4bbe65e2faea6b5912ba55582f2" exitCode=0 Dec 05 06:53:10 crc kubenswrapper[4863]: I1205 06:53:10.682736 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"9e55052d654a4e9837e8299382b6eba9dfb2f4bbe65e2faea6b5912ba55582f2"} Dec 05 06:53:10 crc kubenswrapper[4863]: I1205 06:53:10.683028 4863 scope.go:117] "RemoveContainer" containerID="fc9bba526184aadd4ee54609726dd329742fa6e67bd5a4029ca700481303097c" Dec 05 06:53:11 crc kubenswrapper[4863]: I1205 06:53:11.690031 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"3639bef17310673572cc540c397be13fa9e8e790df5aefdf8ed326f56892e55b"} Dec 05 06:55:12 crc kubenswrapper[4863]: I1205 06:55:12.792938 4863 scope.go:117] "RemoveContainer" containerID="bb64b3de9f5861d8ff84407ac4ccbd05c81f5d04a873374498811d8b3783dd55" Dec 05 06:55:38 crc kubenswrapper[4863]: I1205 06:55:38.464109 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:55:38 crc kubenswrapper[4863]: I1205 06:55:38.464805 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Dec 05 06:56:08 crc kubenswrapper[4863]: I1205 06:56:08.464213 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:56:08 crc kubenswrapper[4863]: I1205 06:56:08.464959 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:56:12 crc kubenswrapper[4863]: I1205 06:56:12.830936 4863 scope.go:117] "RemoveContainer" containerID="c54b3e417651f350eb943e5035a732af9866d3918f53db492520e7d10c77f3ee" Dec 05 06:56:12 crc kubenswrapper[4863]: I1205 06:56:12.864971 4863 scope.go:117] "RemoveContainer" containerID="ba78471894551319427a629f6294389dcab18a2de510afaee0caf7065c73e1e5" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.503714 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-k7trv"] Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.504939 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.522943 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-k7trv"] Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.691630 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-bound-sa-token\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.691706 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.691860 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.691926 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwd5m\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-kube-api-access-pwd5m\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.691992 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-registry-certificates\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.692026 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.692065 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-trusted-ca\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.692225 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-registry-tls\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.714825 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.793690 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.794057 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwd5m\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-kube-api-access-pwd5m\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.794220 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-registry-certificates\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.794349 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-trusted-ca\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.794536 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-registry-tls\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.794739 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-bound-sa-token\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.794883 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.795509 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.795632 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-registry-certificates\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.796007 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-trusted-ca\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.802680 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-registry-tls\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.807653 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc 
kubenswrapper[4863]: I1205 06:56:34.810874 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-bound-sa-token\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.814901 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwd5m\" (UniqueName: \"kubernetes.io/projected/4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f-kube-api-access-pwd5m\") pod \"image-registry-66df7c8f76-k7trv\" (UID: \"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f\") " pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:34 crc kubenswrapper[4863]: I1205 06:56:34.819984 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:35 crc kubenswrapper[4863]: I1205 06:56:35.043811 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-k7trv"] Dec 05 06:56:35 crc kubenswrapper[4863]: I1205 06:56:35.079561 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" event={"ID":"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f","Type":"ContainerStarted","Data":"64823d87e0d893d7714144968025ba29c71fcb77239b7fd3a5558875c3c0c4a8"} Dec 05 06:56:36 crc kubenswrapper[4863]: I1205 06:56:36.087298 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" event={"ID":"4b4ebf03-fd96-4a5d-8bbc-ffa6c183ad2f","Type":"ContainerStarted","Data":"03b8148d60b3853c606cfec5d130022ff453d4d01cb3ae0cf62053ac29864a77"} Dec 05 06:56:36 crc kubenswrapper[4863]: I1205 06:56:36.087558 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:36 crc kubenswrapper[4863]: I1205 06:56:36.122845 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" podStartSLOduration=2.122829639 podStartE2EDuration="2.122829639s" podCreationTimestamp="2025-12-05 06:56:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:56:36.120269308 +0000 UTC m=+623.846266388" watchObservedRunningTime="2025-12-05 06:56:36.122829639 +0000 UTC m=+623.848826679" Dec 05 06:56:38 crc kubenswrapper[4863]: I1205 06:56:38.464418 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:56:38 crc kubenswrapper[4863]: I1205 06:56:38.464902 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:56:38 crc kubenswrapper[4863]: I1205 06:56:38.465001 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:56:38 crc kubenswrapper[4863]: I1205 06:56:38.465797 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3639bef17310673572cc540c397be13fa9e8e790df5aefdf8ed326f56892e55b"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:56:38 crc kubenswrapper[4863]: I1205 06:56:38.466012 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://3639bef17310673572cc540c397be13fa9e8e790df5aefdf8ed326f56892e55b" gracePeriod=600 Dec 05 06:56:39 crc kubenswrapper[4863]: I1205 06:56:39.110641 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="3639bef17310673572cc540c397be13fa9e8e790df5aefdf8ed326f56892e55b" exitCode=0 Dec 05 06:56:39 crc kubenswrapper[4863]: I1205 06:56:39.110710 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"3639bef17310673572cc540c397be13fa9e8e790df5aefdf8ed326f56892e55b"} Dec 05 06:56:39 crc kubenswrapper[4863]: I1205 06:56:39.110766 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"b8c26117c21159b1d8cea8a3a8c1b0a943220d2e10ef93da3165363f83cb28c0"} Dec 05 06:56:39 crc kubenswrapper[4863]: I1205 06:56:39.110791 4863 scope.go:117] "RemoveContainer" containerID="9e55052d654a4e9837e8299382b6eba9dfb2f4bbe65e2faea6b5912ba55582f2" Dec 05 06:56:54 crc kubenswrapper[4863]: I1205 06:56:54.828850 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-k7trv" Dec 05 06:56:54 crc kubenswrapper[4863]: I1205 06:56:54.919655 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rlw7x"] Dec 05 06:57:19 crc kubenswrapper[4863]: I1205 06:57:19.980328 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" podUID="ae6653d2-88b7-41b4-880f-aaa0a19c26dc" containerName="registry" containerID="cri-o://32f93a12451f1aa62f4826fb9e9d9acfea366557bbfa2610ff5244ccf4c93f4c" gracePeriod=30 Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.401230 4863 generic.go:334] "Generic (PLEG): container finished" podID="ae6653d2-88b7-41b4-880f-aaa0a19c26dc" containerID="32f93a12451f1aa62f4826fb9e9d9acfea366557bbfa2610ff5244ccf4c93f4c" exitCode=0 Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.401363 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" event={"ID":"ae6653d2-88b7-41b4-880f-aaa0a19c26dc","Type":"ContainerDied","Data":"32f93a12451f1aa62f4826fb9e9d9acfea366557bbfa2610ff5244ccf4c93f4c"} Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.401697 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" 
event={"ID":"ae6653d2-88b7-41b4-880f-aaa0a19c26dc","Type":"ContainerDied","Data":"1cbbbc859b3ea682a0c90a77d94bdb3e4eba01fca4882c4636018c93703f74aa"} Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.401725 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cbbbc859b3ea682a0c90a77d94bdb3e4eba01fca4882c4636018c93703f74aa" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.437549 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.486554 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxzcw\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-kube-api-access-rxzcw\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.486666 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-ca-trust-extracted\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.486745 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-trusted-ca\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.486807 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-certificates\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.486843 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-installation-pull-secrets\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.487101 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.487148 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-bound-sa-token\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.487183 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-tls\") pod \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\" (UID: \"ae6653d2-88b7-41b4-880f-aaa0a19c26dc\") " Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.487978 4863 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.488283 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.494099 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.497997 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-kube-api-access-rxzcw" (OuterVolumeSpecName: "kube-api-access-rxzcw") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "kube-api-access-rxzcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.499146 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.505290 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.508952 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.511930 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ae6653d2-88b7-41b4-880f-aaa0a19c26dc" (UID: "ae6653d2-88b7-41b4-880f-aaa0a19c26dc"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.589208 4863 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.589256 4863 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.589271 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxzcw\" (UniqueName: \"kubernetes.io/projected/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-kube-api-access-rxzcw\") on node \"crc\" DevicePath \"\"" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.589284 4863 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.589295 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.589306 4863 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 06:57:20 crc kubenswrapper[4863]: I1205 06:57:20.589318 4863 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae6653d2-88b7-41b4-880f-aaa0a19c26dc-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 06:57:21 crc kubenswrapper[4863]: I1205 06:57:21.409721 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rlw7x" Dec 05 06:57:21 crc kubenswrapper[4863]: I1205 06:57:21.446763 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rlw7x"] Dec 05 06:57:21 crc kubenswrapper[4863]: I1205 06:57:21.463520 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rlw7x"] Dec 05 06:57:22 crc kubenswrapper[4863]: I1205 06:57:22.613969 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae6653d2-88b7-41b4-880f-aaa0a19c26dc" path="/var/lib/kubelet/pods/ae6653d2-88b7-41b4-880f-aaa0a19c26dc/volumes" Dec 05 06:58:12 crc kubenswrapper[4863]: I1205 06:58:12.935211 4863 scope.go:117] "RemoveContainer" containerID="32f93a12451f1aa62f4826fb9e9d9acfea366557bbfa2610ff5244ccf4c93f4c" Dec 05 06:58:38 crc kubenswrapper[4863]: I1205 06:58:38.464658 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:58:38 crc kubenswrapper[4863]: I1205 06:58:38.465672 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:58:59 crc kubenswrapper[4863]: I1205 06:58:59.028120 4863 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.010888 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j5lht"] Dec 05 06:59:02 crc kubenswrapper[4863]: E1205 06:59:02.011205 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae6653d2-88b7-41b4-880f-aaa0a19c26dc" containerName="registry" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.011230 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae6653d2-88b7-41b4-880f-aaa0a19c26dc" containerName="registry" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.011428 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae6653d2-88b7-41b4-880f-aaa0a19c26dc" containerName="registry" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.012704 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.030758 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5lht"] Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.209738 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-catalog-content\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.209782 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-utilities\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.209810 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9npt\" (UniqueName: \"kubernetes.io/projected/fa3f157c-a4c5-4979-887c-8a43add50c54-kube-api-access-d9npt\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.311376 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-catalog-content\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.311431 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-utilities\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.311457 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9npt\" (UniqueName: \"kubernetes.io/projected/fa3f157c-a4c5-4979-887c-8a43add50c54-kube-api-access-d9npt\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.312350 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-utilities\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.312372 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-catalog-content\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.344733 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d9npt\" (UniqueName: \"kubernetes.io/projected/fa3f157c-a4c5-4979-887c-8a43add50c54-kube-api-access-d9npt\") pod \"redhat-marketplace-j5lht\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:02 crc kubenswrapper[4863]: I1205 06:59:02.631074 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:03 crc kubenswrapper[4863]: I1205 06:59:03.035986 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5lht"] Dec 05 06:59:03 crc kubenswrapper[4863]: W1205 06:59:03.053967 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa3f157c_a4c5_4979_887c_8a43add50c54.slice/crio-1e6df65a4ddc6aa053e710526612864422cc8a6dcb4a6a54f157590705d61c84 WatchSource:0}: Error finding container 1e6df65a4ddc6aa053e710526612864422cc8a6dcb4a6a54f157590705d61c84: Status 404 returned error can't find the container with id 1e6df65a4ddc6aa053e710526612864422cc8a6dcb4a6a54f157590705d61c84 Dec 05 06:59:03 crc kubenswrapper[4863]: I1205 06:59:03.097152 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5lht" event={"ID":"fa3f157c-a4c5-4979-887c-8a43add50c54","Type":"ContainerStarted","Data":"1e6df65a4ddc6aa053e710526612864422cc8a6dcb4a6a54f157590705d61c84"} Dec 05 06:59:04 crc kubenswrapper[4863]: I1205 06:59:04.104683 4863 generic.go:334] "Generic (PLEG): container finished" podID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerID="2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f" exitCode=0 Dec 05 06:59:04 crc kubenswrapper[4863]: I1205 06:59:04.104754 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5lht" event={"ID":"fa3f157c-a4c5-4979-887c-8a43add50c54","Type":"ContainerDied","Data":"2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f"} Dec 05 06:59:04 crc kubenswrapper[4863]: I1205 06:59:04.108810 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:59:05 crc kubenswrapper[4863]: I1205 06:59:05.115171 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5lht" event={"ID":"fa3f157c-a4c5-4979-887c-8a43add50c54","Type":"ContainerStarted","Data":"1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315"} Dec 05 06:59:06 crc kubenswrapper[4863]: I1205 06:59:06.125742 4863 generic.go:334] "Generic (PLEG): container finished" podID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerID="1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315" exitCode=0 Dec 05 06:59:06 crc kubenswrapper[4863]: I1205 06:59:06.125819 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5lht" event={"ID":"fa3f157c-a4c5-4979-887c-8a43add50c54","Type":"ContainerDied","Data":"1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315"} Dec 05 06:59:07 crc kubenswrapper[4863]: I1205 06:59:07.137837 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5lht" event={"ID":"fa3f157c-a4c5-4979-887c-8a43add50c54","Type":"ContainerStarted","Data":"a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a"} Dec 05 06:59:07 crc kubenswrapper[4863]: I1205 06:59:07.174128 4863 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j5lht" podStartSLOduration=3.665174689 podStartE2EDuration="6.174111965s" podCreationTimestamp="2025-12-05 06:59:01 +0000 UTC" firstStartedPulling="2025-12-05 06:59:04.108629082 +0000 UTC m=+771.834626122" lastFinishedPulling="2025-12-05 06:59:06.617566348 +0000 UTC m=+774.343563398" observedRunningTime="2025-12-05 06:59:07.172673632 +0000 UTC m=+774.898670742" watchObservedRunningTime="2025-12-05 06:59:07.174111965 +0000 UTC m=+774.900108995" Dec 05 06:59:07 crc kubenswrapper[4863]: I1205 06:59:07.834448 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rs7xx"] Dec 05 06:59:07 crc kubenswrapper[4863]: I1205 06:59:07.836991 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:07 crc kubenswrapper[4863]: I1205 06:59:07.840423 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rs7xx"] Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.031339 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-catalog-content\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.031838 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8j7j\" (UniqueName: \"kubernetes.io/projected/c5348155-2208-4687-98db-d82ade18136d-kube-api-access-v8j7j\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.031945 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-utilities\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.132786 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-utilities\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.132855 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-catalog-content\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.133429 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-catalog-content\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc 
kubenswrapper[4863]: I1205 06:59:08.133429 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-utilities\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.133591 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8j7j\" (UniqueName: \"kubernetes.io/projected/c5348155-2208-4687-98db-d82ade18136d-kube-api-access-v8j7j\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.158848 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8j7j\" (UniqueName: \"kubernetes.io/projected/c5348155-2208-4687-98db-d82ade18136d-kube-api-access-v8j7j\") pod \"certified-operators-rs7xx\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.161279 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.350169 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rs7xx"] Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.465759 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:59:08 crc kubenswrapper[4863]: I1205 06:59:08.465814 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:59:09 crc kubenswrapper[4863]: I1205 06:59:09.154575 4863 generic.go:334] "Generic (PLEG): container finished" podID="c5348155-2208-4687-98db-d82ade18136d" containerID="18b46874811877567e823dec5ea4d253fbce23e87eb72f4ca97833020b09b3b4" exitCode=0 Dec 05 06:59:09 crc kubenswrapper[4863]: I1205 06:59:09.154621 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rs7xx" event={"ID":"c5348155-2208-4687-98db-d82ade18136d","Type":"ContainerDied","Data":"18b46874811877567e823dec5ea4d253fbce23e87eb72f4ca97833020b09b3b4"} Dec 05 06:59:09 crc kubenswrapper[4863]: I1205 06:59:09.154651 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rs7xx" event={"ID":"c5348155-2208-4687-98db-d82ade18136d","Type":"ContainerStarted","Data":"b8ad168b120633393375e3c3a5a1e7166851b8718839a90b07ec8c7051416ddb"} Dec 05 06:59:10 crc kubenswrapper[4863]: I1205 06:59:10.163869 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rs7xx" event={"ID":"c5348155-2208-4687-98db-d82ade18136d","Type":"ContainerStarted","Data":"b1f2094f4cd405cfd016e0cc140c5320b90993cbc6a99b539729e895cdd132ae"} Dec 05 06:59:11 crc kubenswrapper[4863]: I1205 
06:59:11.172141 4863 generic.go:334] "Generic (PLEG): container finished" podID="c5348155-2208-4687-98db-d82ade18136d" containerID="b1f2094f4cd405cfd016e0cc140c5320b90993cbc6a99b539729e895cdd132ae" exitCode=0 Dec 05 06:59:11 crc kubenswrapper[4863]: I1205 06:59:11.172186 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rs7xx" event={"ID":"c5348155-2208-4687-98db-d82ade18136d","Type":"ContainerDied","Data":"b1f2094f4cd405cfd016e0cc140c5320b90993cbc6a99b539729e895cdd132ae"} Dec 05 06:59:12 crc kubenswrapper[4863]: I1205 06:59:12.181340 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rs7xx" event={"ID":"c5348155-2208-4687-98db-d82ade18136d","Type":"ContainerStarted","Data":"1676f727584beb7f053a2f5b5662ec86ebec6dbfa83ffd8cb4f7aac8ab0b8fef"} Dec 05 06:59:12 crc kubenswrapper[4863]: I1205 06:59:12.202349 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rs7xx" podStartSLOduration=2.502316795 podStartE2EDuration="5.202324774s" podCreationTimestamp="2025-12-05 06:59:07 +0000 UTC" firstStartedPulling="2025-12-05 06:59:09.156561864 +0000 UTC m=+776.882558914" lastFinishedPulling="2025-12-05 06:59:11.856569843 +0000 UTC m=+779.582566893" observedRunningTime="2025-12-05 06:59:12.199834247 +0000 UTC m=+779.925831297" watchObservedRunningTime="2025-12-05 06:59:12.202324774 +0000 UTC m=+779.928321834" Dec 05 06:59:12 crc kubenswrapper[4863]: I1205 06:59:12.632244 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:12 crc kubenswrapper[4863]: I1205 06:59:12.632305 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:12 crc kubenswrapper[4863]: I1205 06:59:12.705683 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:13 crc kubenswrapper[4863]: I1205 06:59:13.253960 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:14 crc kubenswrapper[4863]: I1205 06:59:14.579686 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5lht"] Dec 05 06:59:15 crc kubenswrapper[4863]: I1205 06:59:15.201610 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j5lht" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="registry-server" containerID="cri-o://a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a" gracePeriod=2 Dec 05 06:59:15 crc kubenswrapper[4863]: I1205 06:59:15.890455 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.040741 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-catalog-content\") pod \"fa3f157c-a4c5-4979-887c-8a43add50c54\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.040801 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-utilities\") pod \"fa3f157c-a4c5-4979-887c-8a43add50c54\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.040917 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9npt\" (UniqueName: \"kubernetes.io/projected/fa3f157c-a4c5-4979-887c-8a43add50c54-kube-api-access-d9npt\") pod \"fa3f157c-a4c5-4979-887c-8a43add50c54\" (UID: \"fa3f157c-a4c5-4979-887c-8a43add50c54\") " Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.041903 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-utilities" (OuterVolumeSpecName: "utilities") pod "fa3f157c-a4c5-4979-887c-8a43add50c54" (UID: "fa3f157c-a4c5-4979-887c-8a43add50c54"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.047046 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa3f157c-a4c5-4979-887c-8a43add50c54-kube-api-access-d9npt" (OuterVolumeSpecName: "kube-api-access-d9npt") pod "fa3f157c-a4c5-4979-887c-8a43add50c54" (UID: "fa3f157c-a4c5-4979-887c-8a43add50c54"). InnerVolumeSpecName "kube-api-access-d9npt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.072941 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa3f157c-a4c5-4979-887c-8a43add50c54" (UID: "fa3f157c-a4c5-4979-887c-8a43add50c54"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.142076 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9npt\" (UniqueName: \"kubernetes.io/projected/fa3f157c-a4c5-4979-887c-8a43add50c54-kube-api-access-d9npt\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.142115 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.142125 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3f157c-a4c5-4979-887c-8a43add50c54-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.211416 4863 generic.go:334] "Generic (PLEG): container finished" podID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerID="a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a" exitCode=0 Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.211509 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5lht" event={"ID":"fa3f157c-a4c5-4979-887c-8a43add50c54","Type":"ContainerDied","Data":"a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a"} Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.211544 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5lht" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.211592 4863 scope.go:117] "RemoveContainer" containerID="a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.211549 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5lht" event={"ID":"fa3f157c-a4c5-4979-887c-8a43add50c54","Type":"ContainerDied","Data":"1e6df65a4ddc6aa053e710526612864422cc8a6dcb4a6a54f157590705d61c84"} Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.231654 4863 scope.go:117] "RemoveContainer" containerID="1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.253107 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5lht"] Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.256616 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5lht"] Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.266236 4863 scope.go:117] "RemoveContainer" containerID="2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.293762 4863 scope.go:117] "RemoveContainer" containerID="a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a" Dec 05 06:59:16 crc kubenswrapper[4863]: E1205 06:59:16.294096 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a\": container with ID starting with a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a not found: ID does not exist" containerID="a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.294126 4863 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a"} err="failed to get container status \"a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a\": rpc error: code = NotFound desc = could not find container \"a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a\": container with ID starting with a68e458c4eabb968bddae04c58f6432485fa0abe0d89f5eedc2c2e0437e1cd4a not found: ID does not exist" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.294157 4863 scope.go:117] "RemoveContainer" containerID="1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315" Dec 05 06:59:16 crc kubenswrapper[4863]: E1205 06:59:16.294393 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315\": container with ID starting with 1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315 not found: ID does not exist" containerID="1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.294412 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315"} err="failed to get container status \"1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315\": rpc error: code = NotFound desc = could not find container \"1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315\": container with ID starting with 1de25a0ad38e3e8533ca96961e7b16748d368556c2c0fe2675a682195bfb5315 not found: ID does not exist" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.294424 4863 scope.go:117] "RemoveContainer" containerID="2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f" Dec 05 06:59:16 crc kubenswrapper[4863]: E1205 06:59:16.294797 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f\": container with ID starting with 2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f not found: ID does not exist" containerID="2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.294820 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f"} err="failed to get container status \"2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f\": rpc error: code = NotFound desc = could not find container \"2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f\": container with ID starting with 2caad68523e94e5782739cfa93ecfb5b213438d4684145c302967b4bd9f4736f not found: ID does not exist" Dec 05 06:59:16 crc kubenswrapper[4863]: I1205 06:59:16.614827 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" path="/var/lib/kubelet/pods/fa3f157c-a4c5-4979-887c-8a43add50c54/volumes" Dec 05 06:59:18 crc kubenswrapper[4863]: I1205 06:59:18.161997 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:18 crc kubenswrapper[4863]: I1205 06:59:18.162569 4863 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:18 crc kubenswrapper[4863]: I1205 06:59:18.231424 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:18 crc kubenswrapper[4863]: I1205 06:59:18.300885 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.593218 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jhtzh"] Dec 05 06:59:19 crc kubenswrapper[4863]: E1205 06:59:19.593810 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="extract-utilities" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.593825 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="extract-utilities" Dec 05 06:59:19 crc kubenswrapper[4863]: E1205 06:59:19.593867 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="registry-server" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.593878 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="registry-server" Dec 05 06:59:19 crc kubenswrapper[4863]: E1205 06:59:19.593916 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="extract-content" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.593927 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="extract-content" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.594059 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa3f157c-a4c5-4979-887c-8a43add50c54" containerName="registry-server" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.597581 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.612772 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jhtzh"] Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.700934 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tttc9\" (UniqueName: \"kubernetes.io/projected/409adf73-2361-40af-be87-7b4c283b5540-kube-api-access-tttc9\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.701164 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-catalog-content\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.701313 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-utilities\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.802407 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tttc9\" (UniqueName: \"kubernetes.io/projected/409adf73-2361-40af-be87-7b4c283b5540-kube-api-access-tttc9\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.802500 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-catalog-content\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.803173 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-catalog-content\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.803267 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-utilities\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.803616 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-utilities\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.827388 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tttc9\" (UniqueName: \"kubernetes.io/projected/409adf73-2361-40af-be87-7b4c283b5540-kube-api-access-tttc9\") pod \"community-operators-jhtzh\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:19 crc kubenswrapper[4863]: I1205 06:59:19.938842 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:20 crc kubenswrapper[4863]: I1205 06:59:20.208255 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jhtzh"] Dec 05 06:59:20 crc kubenswrapper[4863]: I1205 06:59:20.282456 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhtzh" event={"ID":"409adf73-2361-40af-be87-7b4c283b5540","Type":"ContainerStarted","Data":"1a2dc00ecdfcabb2debc09cd82ec7f983517a52a478b00ed36b19dc7713d3397"} Dec 05 06:59:20 crc kubenswrapper[4863]: I1205 06:59:20.575494 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rs7xx"] Dec 05 06:59:20 crc kubenswrapper[4863]: I1205 06:59:20.575732 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rs7xx" podUID="c5348155-2208-4687-98db-d82ade18136d" containerName="registry-server" containerID="cri-o://1676f727584beb7f053a2f5b5662ec86ebec6dbfa83ffd8cb4f7aac8ab0b8fef" gracePeriod=2 Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.289109 4863 generic.go:334] "Generic (PLEG): container finished" podID="409adf73-2361-40af-be87-7b4c283b5540" containerID="a0c8a40f82a2e0e753f89bdae3c468d1fe54e238840773fdf3629ae3baddab2b" exitCode=0 Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.289281 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhtzh" event={"ID":"409adf73-2361-40af-be87-7b4c283b5540","Type":"ContainerDied","Data":"a0c8a40f82a2e0e753f89bdae3c468d1fe54e238840773fdf3629ae3baddab2b"} Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.294205 4863 generic.go:334] "Generic (PLEG): container finished" podID="c5348155-2208-4687-98db-d82ade18136d" containerID="1676f727584beb7f053a2f5b5662ec86ebec6dbfa83ffd8cb4f7aac8ab0b8fef" exitCode=0 Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.294224 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rs7xx" event={"ID":"c5348155-2208-4687-98db-d82ade18136d","Type":"ContainerDied","Data":"1676f727584beb7f053a2f5b5662ec86ebec6dbfa83ffd8cb4f7aac8ab0b8fef"} Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.474977 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.523720 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-utilities\") pod \"c5348155-2208-4687-98db-d82ade18136d\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.523767 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8j7j\" (UniqueName: \"kubernetes.io/projected/c5348155-2208-4687-98db-d82ade18136d-kube-api-access-v8j7j\") pod \"c5348155-2208-4687-98db-d82ade18136d\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.523813 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-catalog-content\") pod \"c5348155-2208-4687-98db-d82ade18136d\" (UID: \"c5348155-2208-4687-98db-d82ade18136d\") " Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.524802 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-utilities" (OuterVolumeSpecName: "utilities") pod "c5348155-2208-4687-98db-d82ade18136d" (UID: "c5348155-2208-4687-98db-d82ade18136d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.529855 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5348155-2208-4687-98db-d82ade18136d-kube-api-access-v8j7j" (OuterVolumeSpecName: "kube-api-access-v8j7j") pod "c5348155-2208-4687-98db-d82ade18136d" (UID: "c5348155-2208-4687-98db-d82ade18136d"). InnerVolumeSpecName "kube-api-access-v8j7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.588832 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5348155-2208-4687-98db-d82ade18136d" (UID: "c5348155-2208-4687-98db-d82ade18136d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.626645 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.626685 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8j7j\" (UniqueName: \"kubernetes.io/projected/c5348155-2208-4687-98db-d82ade18136d-kube-api-access-v8j7j\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:21 crc kubenswrapper[4863]: I1205 06:59:21.626700 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5348155-2208-4687-98db-d82ade18136d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.311388 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rs7xx" event={"ID":"c5348155-2208-4687-98db-d82ade18136d","Type":"ContainerDied","Data":"b8ad168b120633393375e3c3a5a1e7166851b8718839a90b07ec8c7051416ddb"} Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.311438 4863 scope.go:117] "RemoveContainer" containerID="1676f727584beb7f053a2f5b5662ec86ebec6dbfa83ffd8cb4f7aac8ab0b8fef" Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.311548 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rs7xx" Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.327641 4863 scope.go:117] "RemoveContainer" containerID="b1f2094f4cd405cfd016e0cc140c5320b90993cbc6a99b539729e895cdd132ae" Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.350531 4863 scope.go:117] "RemoveContainer" containerID="18b46874811877567e823dec5ea4d253fbce23e87eb72f4ca97833020b09b3b4" Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.376481 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rs7xx"] Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.381276 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rs7xx"] Dec 05 06:59:22 crc kubenswrapper[4863]: I1205 06:59:22.609634 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5348155-2208-4687-98db-d82ade18136d" path="/var/lib/kubelet/pods/c5348155-2208-4687-98db-d82ade18136d/volumes" Dec 05 06:59:23 crc kubenswrapper[4863]: I1205 06:59:23.320559 4863 generic.go:334] "Generic (PLEG): container finished" podID="409adf73-2361-40af-be87-7b4c283b5540" containerID="ce2f26584ac35e9be3355b8e86586d12647cacfeb07c887f04973c4215d7d21b" exitCode=0 Dec 05 06:59:23 crc kubenswrapper[4863]: I1205 06:59:23.320635 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhtzh" event={"ID":"409adf73-2361-40af-be87-7b4c283b5540","Type":"ContainerDied","Data":"ce2f26584ac35e9be3355b8e86586d12647cacfeb07c887f04973c4215d7d21b"} Dec 05 06:59:24 crc kubenswrapper[4863]: I1205 06:59:24.327232 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhtzh" event={"ID":"409adf73-2361-40af-be87-7b4c283b5540","Type":"ContainerStarted","Data":"4baa87a6295715cdfacda2547d4f67c10247c29e83f0ef8ccd49b13ae886b34c"} Dec 05 06:59:24 crc kubenswrapper[4863]: I1205 06:59:24.351750 4863 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-marketplace/community-operators-jhtzh" podStartSLOduration=2.911961917 podStartE2EDuration="5.351728462s" podCreationTimestamp="2025-12-05 06:59:19 +0000 UTC" firstStartedPulling="2025-12-05 06:59:21.291607301 +0000 UTC m=+789.017604351" lastFinishedPulling="2025-12-05 06:59:23.731373846 +0000 UTC m=+791.457370896" observedRunningTime="2025-12-05 06:59:24.348592539 +0000 UTC m=+792.074589619" watchObservedRunningTime="2025-12-05 06:59:24.351728462 +0000 UTC m=+792.077725522" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.585699 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xjcxh"] Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.586067 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-controller" containerID="cri-o://13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.586368 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-acl-logging" containerID="cri-o://7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.586371 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.586387 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="sbdb" containerID="cri-o://f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.586386 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-node" containerID="cri-o://659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.586397 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="nbdb" containerID="cri-o://aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.586398 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="northd" containerID="cri-o://8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.627635 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" 
containerID="cri-o://313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" gracePeriod=30 Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.869442 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/3.log" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.871877 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovn-acl-logging/0.log" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.872349 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovn-controller/0.log" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.872721 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940125 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q6wt9"] Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940311 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940322 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940329 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940335 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940342 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940347 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940354 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="sbdb" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940359 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="sbdb" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940370 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940375 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940381 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="northd" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940386 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="northd" Dec 05 06:59:25 crc 
kubenswrapper[4863]: E1205 06:59:25.940397 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940402 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940409 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5348155-2208-4687-98db-d82ade18136d" containerName="registry-server" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940417 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5348155-2208-4687-98db-d82ade18136d" containerName="registry-server" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940427 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kubecfg-setup" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940433 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kubecfg-setup" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940439 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940445 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940453 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5348155-2208-4687-98db-d82ade18136d" containerName="extract-utilities" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940459 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5348155-2208-4687-98db-d82ade18136d" containerName="extract-utilities" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940468 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-node" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940489 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-node" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940498 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-acl-logging" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940504 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-acl-logging" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940511 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="nbdb" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940516 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="nbdb" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940525 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5348155-2208-4687-98db-d82ade18136d" containerName="extract-content" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940530 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5348155-2208-4687-98db-d82ade18136d" 
containerName="extract-content" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940608 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940617 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-node" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940622 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="nbdb" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940631 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940638 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940645 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940652 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5348155-2208-4687-98db-d82ade18136d" containerName="registry-server" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940658 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="northd" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940666 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940685 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940693 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovn-acl-logging" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940701 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="sbdb" Dec 05 06:59:25 crc kubenswrapper[4863]: E1205 06:59:25.940786 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940793 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.940870 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerName="ovnkube-controller" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.942391 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984087 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-bin\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984137 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-netd\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984175 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-config\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984240 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-kubelet\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984280 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984287 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984310 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984760 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984893 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-etc-openvswitch\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984924 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-slash\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984949 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovn-node-metrics-cert\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984967 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-netns\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984963 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.984985 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-script-lib\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985010 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-systemd\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985039 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985059 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-var-lib-openvswitch\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985090 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-ovn\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985112 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-log-socket\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985162 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-openvswitch\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985178 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-node-log\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985198 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-env-overrides\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985215 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q27nh\" (UniqueName: \"kubernetes.io/projected/4e873158-22c6-4eab-9cb1-438b0f50f46d-kube-api-access-q27nh\") pod 
\"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985237 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-systemd-units\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985256 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-ovn-kubernetes\") pod \"4e873158-22c6-4eab-9cb1-438b0f50f46d\" (UID: \"4e873158-22c6-4eab-9cb1-438b0f50f46d\") " Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985362 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-log-socket\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985390 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-cni-bin\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985411 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-run-netns\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985426 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-systemd-units\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985450 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-systemd\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985486 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-etc-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985529 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-var-lib-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: 
\"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985546 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovnkube-script-lib\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985563 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985579 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovnkube-config\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985592 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovn-node-metrics-cert\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985606 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpk5k\" (UniqueName: \"kubernetes.io/projected/a1422717-c4ac-4b18-9ab7-58356eedc6c0-kube-api-access-qpk5k\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985625 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-slash\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985638 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-kubelet\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985654 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-run-ovn-kubernetes\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985669 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-env-overrides\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985685 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-cni-netd\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985704 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-node-log\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985722 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985739 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-ovn\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985771 4863 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985782 4863 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985791 4863 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985799 4863 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985807 4863 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.985842 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). 
InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986054 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-slash" (OuterVolumeSpecName: "host-slash") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986084 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986149 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986191 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986227 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986293 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-log-socket" (OuterVolumeSpecName: "log-socket") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986327 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986360 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-node-log" (OuterVolumeSpecName: "node-log") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.986865 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.987321 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.987337 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.991904 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e873158-22c6-4eab-9cb1-438b0f50f46d-kube-api-access-q27nh" (OuterVolumeSpecName: "kube-api-access-q27nh") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "kube-api-access-q27nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.992110 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:59:25 crc kubenswrapper[4863]: I1205 06:59:25.999621 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "4e873158-22c6-4eab-9cb1-438b0f50f46d" (UID: "4e873158-22c6-4eab-9cb1-438b0f50f46d"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086427 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-var-lib-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086570 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-var-lib-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086809 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovnkube-script-lib\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086886 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086911 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovnkube-config\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086926 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovn-node-metrics-cert\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086944 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpk5k\" (UniqueName: \"kubernetes.io/projected/a1422717-c4ac-4b18-9ab7-58356eedc6c0-kube-api-access-qpk5k\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086972 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-slash\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086994 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-run-ovn-kubernetes\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087010 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-kubelet\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087033 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-env-overrides\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087049 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-cni-netd\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087077 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-node-log\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087100 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-ovn\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087114 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087146 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-log-socket\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087186 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-cni-bin\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087218 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-run-netns\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: 
I1205 06:59:26.087234 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-systemd-units\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087273 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-systemd\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087288 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-etc-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087351 4863 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087362 4863 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087369 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-slash\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087390 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-ovn\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.086966 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087349 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-node-log\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087372 4863 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087420 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-run-ovn-kubernetes\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087444 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-log-socket\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087443 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087462 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-kubelet\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087501 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-run-systemd\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087523 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-etc-openvswitch\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087542 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-cni-bin\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-cni-netd\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087423 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q27nh\" (UniqueName: \"kubernetes.io/projected/4e873158-22c6-4eab-9cb1-438b0f50f46d-kube-api-access-q27nh\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087593 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-systemd-units\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc 
kubenswrapper[4863]: I1205 06:59:26.087662 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a1422717-c4ac-4b18-9ab7-58356eedc6c0-host-run-netns\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087693 4863 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087706 4863 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087669 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovnkube-script-lib\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087738 4863 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087747 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087756 4863 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087766 4863 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e873158-22c6-4eab-9cb1-438b0f50f46d-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087776 4863 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087772 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovnkube-config\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087787 4863 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087856 4863 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087871 4863 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087882 4863 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e873158-22c6-4eab-9cb1-438b0f50f46d-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.087906 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a1422717-c4ac-4b18-9ab7-58356eedc6c0-env-overrides\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.090410 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a1422717-c4ac-4b18-9ab7-58356eedc6c0-ovn-node-metrics-cert\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.106151 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpk5k\" (UniqueName: \"kubernetes.io/projected/a1422717-c4ac-4b18-9ab7-58356eedc6c0-kube-api-access-qpk5k\") pod \"ovnkube-node-q6wt9\" (UID: \"a1422717-c4ac-4b18-9ab7-58356eedc6c0\") " pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.255691 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:26 crc kubenswrapper[4863]: W1205 06:59:26.275050 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1422717_c4ac_4b18_9ab7_58356eedc6c0.slice/crio-f9c1380142d2ec362c244c7b9b1660f16d2c9599e46ddb03592cd6e822731579 WatchSource:0}: Error finding container f9c1380142d2ec362c244c7b9b1660f16d2c9599e46ddb03592cd6e822731579: Status 404 returned error can't find the container with id f9c1380142d2ec362c244c7b9b1660f16d2c9599e46ddb03592cd6e822731579 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.342805 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/2.log" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.343533 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/1.log" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.343588 4863 generic.go:334] "Generic (PLEG): container finished" podID="b9e2cdef-4a53-4f32-b973-e5d6ba0708db" containerID="a106b5c99fac4aa7ecb61515b8604f83bec91b87b174aeca1a85ca2adb46c601" exitCode=2 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.343656 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerDied","Data":"a106b5c99fac4aa7ecb61515b8604f83bec91b87b174aeca1a85ca2adb46c601"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.343701 4863 scope.go:117] "RemoveContainer" containerID="8fd25427dc5fb231606d42ec1e323093153a093be8b07ed06931f61c800f405c" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.344458 4863 scope.go:117] "RemoveContainer" containerID="a106b5c99fac4aa7ecb61515b8604f83bec91b87b174aeca1a85ca2adb46c601" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.345759 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"f9c1380142d2ec362c244c7b9b1660f16d2c9599e46ddb03592cd6e822731579"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.350761 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovnkube-controller/3.log" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.358608 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovn-acl-logging/0.log" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.359363 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xjcxh_4e873158-22c6-4eab-9cb1-438b0f50f46d/ovn-controller/0.log" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360755 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" exitCode=0 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360787 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" exitCode=0 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360799 4863 
generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" exitCode=0 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360808 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" exitCode=0 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360817 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" exitCode=0 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360826 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" exitCode=0 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360834 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" exitCode=143 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360843 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e873158-22c6-4eab-9cb1-438b0f50f46d" containerID="13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" exitCode=143 Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360867 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360897 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360915 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360928 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360942 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360955 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360971 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360984 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.360992 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361000 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361007 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361015 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361022 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361030 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361036 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361043 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361053 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361064 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361072 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361080 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361087 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361094 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361102 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361109 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361116 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361123 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361130 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361140 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361148 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361150 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361701 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361718 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361725 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361730 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361735 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361740 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361745 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361750 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361755 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361774 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xjcxh" event={"ID":"4e873158-22c6-4eab-9cb1-438b0f50f46d","Type":"ContainerDied","Data":"302d21980ff0ae7b7085fe86876f26cfc77388b00034166305f31ba33679beda"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361799 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361805 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361810 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361815 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361820 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361825 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361830 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361835 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361841 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.361909 4863 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.376170 4863 scope.go:117] "RemoveContainer" containerID="313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.470433 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.499196 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xjcxh"] Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.504680 4863 scope.go:117] "RemoveContainer" containerID="f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.506790 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xjcxh"] Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.517536 4863 scope.go:117] "RemoveContainer" containerID="aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.532769 4863 scope.go:117] "RemoveContainer" containerID="8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.547196 4863 scope.go:117] "RemoveContainer" containerID="876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.570706 4863 scope.go:117] "RemoveContainer" containerID="659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.591398 4863 scope.go:117] "RemoveContainer" 
containerID="7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.608770 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e873158-22c6-4eab-9cb1-438b0f50f46d" path="/var/lib/kubelet/pods/4e873158-22c6-4eab-9cb1-438b0f50f46d/volumes" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.618825 4863 scope.go:117] "RemoveContainer" containerID="13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.641330 4863 scope.go:117] "RemoveContainer" containerID="4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.666071 4863 scope.go:117] "RemoveContainer" containerID="313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.666598 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": container with ID starting with 313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95 not found: ID does not exist" containerID="313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.666643 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} err="failed to get container status \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": rpc error: code = NotFound desc = could not find container \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": container with ID starting with 313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.666679 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.667109 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": container with ID starting with c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014 not found: ID does not exist" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.667191 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} err="failed to get container status \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": rpc error: code = NotFound desc = could not find container \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": container with ID starting with c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.667218 4863 scope.go:117] "RemoveContainer" containerID="f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.667721 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": container with ID starting with f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f not found: ID does not exist" containerID="f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.667807 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} err="failed to get container status \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": rpc error: code = NotFound desc = could not find container \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": container with ID starting with f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.667874 4863 scope.go:117] "RemoveContainer" containerID="aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.668265 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": container with ID starting with aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f not found: ID does not exist" containerID="aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.668335 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} err="failed to get container status \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": rpc error: code = NotFound desc = could not find container \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": container with ID starting with aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.668362 4863 scope.go:117] "RemoveContainer" containerID="8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.668827 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": container with ID starting with 8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213 not found: ID does not exist" containerID="8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.668865 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} err="failed to get container status \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": rpc error: code = NotFound desc = could not find container \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": container with ID starting with 8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.668933 4863 scope.go:117] "RemoveContainer" containerID="876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" Dec 05 06:59:26 crc 
kubenswrapper[4863]: E1205 06:59:26.669391 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": container with ID starting with 876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134 not found: ID does not exist" containerID="876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.669461 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} err="failed to get container status \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": rpc error: code = NotFound desc = could not find container \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": container with ID starting with 876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.669538 4863 scope.go:117] "RemoveContainer" containerID="659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.670015 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": container with ID starting with 659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39 not found: ID does not exist" containerID="659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.670061 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} err="failed to get container status \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": rpc error: code = NotFound desc = could not find container \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": container with ID starting with 659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.670096 4863 scope.go:117] "RemoveContainer" containerID="7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.670522 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": container with ID starting with 7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1 not found: ID does not exist" containerID="7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.670567 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} err="failed to get container status \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": rpc error: code = NotFound desc = could not find container \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": container with ID starting with 7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: 
I1205 06:59:26.670625 4863 scope.go:117] "RemoveContainer" containerID="13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.671102 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": container with ID starting with 13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f not found: ID does not exist" containerID="13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.671166 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} err="failed to get container status \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": rpc error: code = NotFound desc = could not find container \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": container with ID starting with 13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.671191 4863 scope.go:117] "RemoveContainer" containerID="4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22" Dec 05 06:59:26 crc kubenswrapper[4863]: E1205 06:59:26.671703 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": container with ID starting with 4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22 not found: ID does not exist" containerID="4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.671767 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} err="failed to get container status \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": rpc error: code = NotFound desc = could not find container \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": container with ID starting with 4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.671787 4863 scope.go:117] "RemoveContainer" containerID="313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.672406 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} err="failed to get container status \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": rpc error: code = NotFound desc = could not find container \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": container with ID starting with 313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.672531 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.673273 4863 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} err="failed to get container status \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": rpc error: code = NotFound desc = could not find container \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": container with ID starting with c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.673320 4863 scope.go:117] "RemoveContainer" containerID="f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.673683 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} err="failed to get container status \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": rpc error: code = NotFound desc = could not find container \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": container with ID starting with f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.673713 4863 scope.go:117] "RemoveContainer" containerID="aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.674272 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} err="failed to get container status \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": rpc error: code = NotFound desc = could not find container \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": container with ID starting with aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.674298 4863 scope.go:117] "RemoveContainer" containerID="8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.674800 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} err="failed to get container status \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": rpc error: code = NotFound desc = could not find container \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": container with ID starting with 8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.674866 4863 scope.go:117] "RemoveContainer" containerID="876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.675334 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} err="failed to get container status \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": rpc error: code = NotFound desc = could not find container \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": container with ID starting with 876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134 not found: ID does not exist" Dec 
05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.675370 4863 scope.go:117] "RemoveContainer" containerID="659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.675817 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} err="failed to get container status \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": rpc error: code = NotFound desc = could not find container \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": container with ID starting with 659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.675845 4863 scope.go:117] "RemoveContainer" containerID="7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.676315 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} err="failed to get container status \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": rpc error: code = NotFound desc = could not find container \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": container with ID starting with 7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.676386 4863 scope.go:117] "RemoveContainer" containerID="13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.676822 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} err="failed to get container status \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": rpc error: code = NotFound desc = could not find container \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": container with ID starting with 13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.676862 4863 scope.go:117] "RemoveContainer" containerID="4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.677234 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} err="failed to get container status \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": rpc error: code = NotFound desc = could not find container \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": container with ID starting with 4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.677299 4863 scope.go:117] "RemoveContainer" containerID="313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.677645 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} err="failed to get container status 
\"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": rpc error: code = NotFound desc = could not find container \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": container with ID starting with 313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.677671 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.678014 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} err="failed to get container status \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": rpc error: code = NotFound desc = could not find container \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": container with ID starting with c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.678055 4863 scope.go:117] "RemoveContainer" containerID="f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.678410 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} err="failed to get container status \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": rpc error: code = NotFound desc = could not find container \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": container with ID starting with f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.678434 4863 scope.go:117] "RemoveContainer" containerID="aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.678866 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} err="failed to get container status \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": rpc error: code = NotFound desc = could not find container \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": container with ID starting with aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.678905 4863 scope.go:117] "RemoveContainer" containerID="8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.679357 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} err="failed to get container status \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": rpc error: code = NotFound desc = could not find container \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": container with ID starting with 8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.679429 4863 scope.go:117] "RemoveContainer" 
containerID="876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.679875 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} err="failed to get container status \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": rpc error: code = NotFound desc = could not find container \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": container with ID starting with 876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.679904 4863 scope.go:117] "RemoveContainer" containerID="659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.680296 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} err="failed to get container status \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": rpc error: code = NotFound desc = could not find container \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": container with ID starting with 659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.680324 4863 scope.go:117] "RemoveContainer" containerID="7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.680777 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} err="failed to get container status \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": rpc error: code = NotFound desc = could not find container \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": container with ID starting with 7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.680829 4863 scope.go:117] "RemoveContainer" containerID="13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.681311 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} err="failed to get container status \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": rpc error: code = NotFound desc = could not find container \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": container with ID starting with 13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.681351 4863 scope.go:117] "RemoveContainer" containerID="4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.681724 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} err="failed to get container status \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": rpc error: code = NotFound desc = could not find 
container \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": container with ID starting with 4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.681751 4863 scope.go:117] "RemoveContainer" containerID="313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.682192 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95"} err="failed to get container status \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": rpc error: code = NotFound desc = could not find container \"313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95\": container with ID starting with 313f400d9f220871731d1033d80eb45f5b6daba7163c87f58fcf2a14e8e90a95 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.682220 4863 scope.go:117] "RemoveContainer" containerID="c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.682766 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014"} err="failed to get container status \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": rpc error: code = NotFound desc = could not find container \"c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014\": container with ID starting with c588739c84ba249ea61fff83db32b5f3d0378a43b9d919a71912d06407dae014 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.682839 4863 scope.go:117] "RemoveContainer" containerID="f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.683386 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f"} err="failed to get container status \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": rpc error: code = NotFound desc = could not find container \"f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f\": container with ID starting with f4d021a5d5a2355802681ad68905d4d79a2e868a514a373a7958c7d4c506bb9f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.683449 4863 scope.go:117] "RemoveContainer" containerID="aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.683938 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f"} err="failed to get container status \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": rpc error: code = NotFound desc = could not find container \"aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f\": container with ID starting with aaaaa17c226fc6ea6c8134cd068d09a9b857c1883cd3c694d9fffe488fbde96f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.683974 4863 scope.go:117] "RemoveContainer" containerID="8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.684382 4863 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213"} err="failed to get container status \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": rpc error: code = NotFound desc = could not find container \"8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213\": container with ID starting with 8477c10cc5f4f37ece604667527bf46f9fefa3d909fd44981bc928ba1f02c213 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.684413 4863 scope.go:117] "RemoveContainer" containerID="876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.684833 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134"} err="failed to get container status \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": rpc error: code = NotFound desc = could not find container \"876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134\": container with ID starting with 876bb0f71ffe78e6af3d3a69c226943f686e8cafb1d64830038717fe18150134 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.684860 4863 scope.go:117] "RemoveContainer" containerID="659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.685299 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39"} err="failed to get container status \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": rpc error: code = NotFound desc = could not find container \"659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39\": container with ID starting with 659833ae3259036af436141713f806d1ba7b5f1ab5f7d68fa54e269fd33f9a39 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.685326 4863 scope.go:117] "RemoveContainer" containerID="7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.685634 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1"} err="failed to get container status \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": rpc error: code = NotFound desc = could not find container \"7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1\": container with ID starting with 7f80ac696d6cc68689cc8d3882a874b24776c18bcdcff125083471e933d203a1 not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.685658 4863 scope.go:117] "RemoveContainer" containerID="13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.685946 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f"} err="failed to get container status \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": rpc error: code = NotFound desc = could not find container \"13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f\": container with ID starting with 
13649ebb7619a1fe175232611511e92f5c3000fd86084cdd75b6a77336d1605f not found: ID does not exist" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.685967 4863 scope.go:117] "RemoveContainer" containerID="4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22" Dec 05 06:59:26 crc kubenswrapper[4863]: I1205 06:59:26.686262 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22"} err="failed to get container status \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": rpc error: code = NotFound desc = could not find container \"4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22\": container with ID starting with 4f58ef5c510ec0d40930757f92b7cbc415bb9e81fe543a1beb86b42f15076d22 not found: ID does not exist" Dec 05 06:59:27 crc kubenswrapper[4863]: I1205 06:59:27.385229 4863 generic.go:334] "Generic (PLEG): container finished" podID="a1422717-c4ac-4b18-9ab7-58356eedc6c0" containerID="971efcba644d6f48a5e89838bfb951127da91ea321cfe0afa785eabe21b7db5e" exitCode=0 Dec 05 06:59:27 crc kubenswrapper[4863]: I1205 06:59:27.385426 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerDied","Data":"971efcba644d6f48a5e89838bfb951127da91ea321cfe0afa785eabe21b7db5e"} Dec 05 06:59:27 crc kubenswrapper[4863]: I1205 06:59:27.390871 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vw8fd_b9e2cdef-4a53-4f32-b973-e5d6ba0708db/kube-multus/2.log" Dec 05 06:59:27 crc kubenswrapper[4863]: I1205 06:59:27.390977 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vw8fd" event={"ID":"b9e2cdef-4a53-4f32-b973-e5d6ba0708db","Type":"ContainerStarted","Data":"9d43e666e190a98553690d51ff6f1250ba67a91dfff82108889202ad3ad94c23"} Dec 05 06:59:28 crc kubenswrapper[4863]: I1205 06:59:28.400037 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"4eae26952a0c22b19721f35e30d34bf5ee3e8c34c7680c45846cf63cb25b2af7"} Dec 05 06:59:28 crc kubenswrapper[4863]: I1205 06:59:28.400636 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"1cd14262bf206a9d7ccddd30d3ce56fd819409945bfa5af3bbc1e0c2f9855537"} Dec 05 06:59:28 crc kubenswrapper[4863]: I1205 06:59:28.400646 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"9f766909fcd371bb7fc67b7dbe60cb68a438b7e4d0018c955126e37129ffa4a1"} Dec 05 06:59:28 crc kubenswrapper[4863]: I1205 06:59:28.400656 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"860930d86c32751300edc89860279646ce2c638a8f0a2134b715bfc6acf3b9b9"} Dec 05 06:59:29 crc kubenswrapper[4863]: I1205 06:59:29.411251 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" 
event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"ca3b9a58ca9e57dc208c43ca2dcde88cc090e0ba7e2ff5d2d5963b6148cf9b3f"} Dec 05 06:59:29 crc kubenswrapper[4863]: I1205 06:59:29.412569 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"2d00be3601ce61109d9474218a1d73500ec0856381e5290803e123930bdcf2e2"} Dec 05 06:59:29 crc kubenswrapper[4863]: I1205 06:59:29.939687 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:29 crc kubenswrapper[4863]: I1205 06:59:29.939753 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:30 crc kubenswrapper[4863]: I1205 06:59:30.007830 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:30 crc kubenswrapper[4863]: I1205 06:59:30.462502 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:30 crc kubenswrapper[4863]: I1205 06:59:30.522185 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jhtzh"] Dec 05 06:59:31 crc kubenswrapper[4863]: I1205 06:59:31.429539 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"cd5fa52b75100abc34158549842ec9cf03500702c6c33220466bc3a410c29f1f"} Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.285344 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-5sdgn"] Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.286122 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.288814 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.289309 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.289621 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.290816 4863 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-nj7r9" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.435074 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jhtzh" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="registry-server" containerID="cri-o://4baa87a6295715cdfacda2547d4f67c10247c29e83f0ef8ccd49b13ae886b34c" gracePeriod=2 Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.469832 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/460d9ee9-73c2-4b9e-9056-4ff13b350e64-node-mnt\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.469931 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/460d9ee9-73c2-4b9e-9056-4ff13b350e64-crc-storage\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.469962 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q29tf\" (UniqueName: \"kubernetes.io/projected/460d9ee9-73c2-4b9e-9056-4ff13b350e64-kube-api-access-q29tf\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.571277 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/460d9ee9-73c2-4b9e-9056-4ff13b350e64-node-mnt\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.571322 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/460d9ee9-73c2-4b9e-9056-4ff13b350e64-crc-storage\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.571343 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q29tf\" (UniqueName: \"kubernetes.io/projected/460d9ee9-73c2-4b9e-9056-4ff13b350e64-kube-api-access-q29tf\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.571657 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/460d9ee9-73c2-4b9e-9056-4ff13b350e64-node-mnt\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.572212 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/460d9ee9-73c2-4b9e-9056-4ff13b350e64-crc-storage\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:32 crc kubenswrapper[4863]: I1205 06:59:32.986729 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q29tf\" (UniqueName: \"kubernetes.io/projected/460d9ee9-73c2-4b9e-9056-4ff13b350e64-kube-api-access-q29tf\") pod \"crc-storage-crc-5sdgn\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.209690 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:33 crc kubenswrapper[4863]: E1205 06:59:33.247256 4863 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(22ca3bd8e0c3164d44bf121d8d4441371da64cfaeeadbc75a531523dfa434267): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:59:33 crc kubenswrapper[4863]: E1205 06:59:33.247384 4863 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(22ca3bd8e0c3164d44bf121d8d4441371da64cfaeeadbc75a531523dfa434267): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:33 crc kubenswrapper[4863]: E1205 06:59:33.247434 4863 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(22ca3bd8e0c3164d44bf121d8d4441371da64cfaeeadbc75a531523dfa434267): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:33 crc kubenswrapper[4863]: E1205 06:59:33.247567 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-5sdgn_crc-storage(460d9ee9-73c2-4b9e-9056-4ff13b350e64)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-5sdgn_crc-storage(460d9ee9-73c2-4b9e-9056-4ff13b350e64)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(22ca3bd8e0c3164d44bf121d8d4441371da64cfaeeadbc75a531523dfa434267): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-5sdgn" podUID="460d9ee9-73c2-4b9e-9056-4ff13b350e64" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.666663 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-46kll"] Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.668436 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.788281 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-utilities\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.788523 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-catalog-content\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.788753 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4shxs\" (UniqueName: \"kubernetes.io/projected/c6c3cc6f-47cd-484a-a045-07ca0f492121-kube-api-access-4shxs\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.890628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-catalog-content\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.890792 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4shxs\" (UniqueName: \"kubernetes.io/projected/c6c3cc6f-47cd-484a-a045-07ca0f492121-kube-api-access-4shxs\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.890850 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-utilities\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.891256 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-catalog-content\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.891432 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-utilities\") pod \"redhat-operators-46kll\" (UID: 
\"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:33 crc kubenswrapper[4863]: I1205 06:59:33.914978 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4shxs\" (UniqueName: \"kubernetes.io/projected/c6c3cc6f-47cd-484a-a045-07ca0f492121-kube-api-access-4shxs\") pod \"redhat-operators-46kll\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.005764 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:34 crc kubenswrapper[4863]: E1205 06:59:34.026729 4863 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(c97db0d5433f024409c48342e80be3dc29ce545e32852916544cbcbdede3a87d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:59:34 crc kubenswrapper[4863]: E1205 06:59:34.026822 4863 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(c97db0d5433f024409c48342e80be3dc29ce545e32852916544cbcbdede3a87d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:34 crc kubenswrapper[4863]: E1205 06:59:34.026860 4863 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(c97db0d5433f024409c48342e80be3dc29ce545e32852916544cbcbdede3a87d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:34 crc kubenswrapper[4863]: E1205 06:59:34.026924 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"redhat-operators-46kll_openshift-marketplace(c6c3cc6f-47cd-484a-a045-07ca0f492121)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"redhat-operators-46kll_openshift-marketplace(c6c3cc6f-47cd-484a-a045-07ca0f492121)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(c97db0d5433f024409c48342e80be3dc29ce545e32852916544cbcbdede3a87d): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-marketplace/redhat-operators-46kll" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.454013 4863 generic.go:334] "Generic (PLEG): container finished" podID="409adf73-2361-40af-be87-7b4c283b5540" containerID="4baa87a6295715cdfacda2547d4f67c10247c29e83f0ef8ccd49b13ae886b34c" exitCode=0 Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.454084 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhtzh" event={"ID":"409adf73-2361-40af-be87-7b4c283b5540","Type":"ContainerDied","Data":"4baa87a6295715cdfacda2547d4f67c10247c29e83f0ef8ccd49b13ae886b34c"} Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.458968 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" event={"ID":"a1422717-c4ac-4b18-9ab7-58356eedc6c0","Type":"ContainerStarted","Data":"79ac835f6fe78bd66f60f54ac6da3140a7aaa838ae1f8c034a3808197cb8ea52"} Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.459507 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.459538 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.459550 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.489926 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.492178 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:34 crc kubenswrapper[4863]: I1205 06:59:34.500610 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" podStartSLOduration=9.500595864 podStartE2EDuration="9.500595864s" podCreationTimestamp="2025-12-05 06:59:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:59:34.499429477 +0000 UTC m=+802.225426517" watchObservedRunningTime="2025-12-05 06:59:34.500595864 +0000 UTC m=+802.226592904" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.020350 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5sdgn"] Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.020508 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.020921 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.054832 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-46kll"] Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.054946 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.055363 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.073585 4863 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(134c4907d6f50e7587ba33d073be9c98994d77637a3554c8fd1d98dcb359b717): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.073643 4863 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(134c4907d6f50e7587ba33d073be9c98994d77637a3554c8fd1d98dcb359b717): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.073660 4863 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(134c4907d6f50e7587ba33d073be9c98994d77637a3554c8fd1d98dcb359b717): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.073699 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-5sdgn_crc-storage(460d9ee9-73c2-4b9e-9056-4ff13b350e64)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-5sdgn_crc-storage(460d9ee9-73c2-4b9e-9056-4ff13b350e64)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5sdgn_crc-storage_460d9ee9-73c2-4b9e-9056-4ff13b350e64_0(134c4907d6f50e7587ba33d073be9c98994d77637a3554c8fd1d98dcb359b717): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-5sdgn" podUID="460d9ee9-73c2-4b9e-9056-4ff13b350e64" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.085867 4863 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(eb95d336e7a44c001f42fff49f0d7361b6c4287dcffc62a6caaacfcdf0db0fff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.085925 4863 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(eb95d336e7a44c001f42fff49f0d7361b6c4287dcffc62a6caaacfcdf0db0fff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.085953 4863 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(eb95d336e7a44c001f42fff49f0d7361b6c4287dcffc62a6caaacfcdf0db0fff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:35 crc kubenswrapper[4863]: E1205 06:59:35.086002 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"redhat-operators-46kll_openshift-marketplace(c6c3cc6f-47cd-484a-a045-07ca0f492121)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"redhat-operators-46kll_openshift-marketplace(c6c3cc6f-47cd-484a-a045-07ca0f492121)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-46kll_openshift-marketplace_c6c3cc6f-47cd-484a-a045-07ca0f492121_0(eb95d336e7a44c001f42fff49f0d7361b6c4287dcffc62a6caaacfcdf0db0fff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/redhat-operators-46kll" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.122750 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.205722 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-utilities\") pod \"409adf73-2361-40af-be87-7b4c283b5540\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.206214 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tttc9\" (UniqueName: \"kubernetes.io/projected/409adf73-2361-40af-be87-7b4c283b5540-kube-api-access-tttc9\") pod \"409adf73-2361-40af-be87-7b4c283b5540\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.206412 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-catalog-content\") pod \"409adf73-2361-40af-be87-7b4c283b5540\" (UID: \"409adf73-2361-40af-be87-7b4c283b5540\") " Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.208915 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-utilities" (OuterVolumeSpecName: "utilities") pod "409adf73-2361-40af-be87-7b4c283b5540" (UID: "409adf73-2361-40af-be87-7b4c283b5540"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.227689 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/409adf73-2361-40af-be87-7b4c283b5540-kube-api-access-tttc9" (OuterVolumeSpecName: "kube-api-access-tttc9") pod "409adf73-2361-40af-be87-7b4c283b5540" (UID: "409adf73-2361-40af-be87-7b4c283b5540"). InnerVolumeSpecName "kube-api-access-tttc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.272208 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "409adf73-2361-40af-be87-7b4c283b5540" (UID: "409adf73-2361-40af-be87-7b4c283b5540"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.307782 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tttc9\" (UniqueName: \"kubernetes.io/projected/409adf73-2361-40af-be87-7b4c283b5540-kube-api-access-tttc9\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.307821 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.307833 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/409adf73-2361-40af-be87-7b4c283b5540-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.465151 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhtzh" event={"ID":"409adf73-2361-40af-be87-7b4c283b5540","Type":"ContainerDied","Data":"1a2dc00ecdfcabb2debc09cd82ec7f983517a52a478b00ed36b19dc7713d3397"} Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.465216 4863 scope.go:117] "RemoveContainer" containerID="4baa87a6295715cdfacda2547d4f67c10247c29e83f0ef8ccd49b13ae886b34c" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.465243 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jhtzh" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.481050 4863 scope.go:117] "RemoveContainer" containerID="ce2f26584ac35e9be3355b8e86586d12647cacfeb07c887f04973c4215d7d21b" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.503605 4863 scope.go:117] "RemoveContainer" containerID="a0c8a40f82a2e0e753f89bdae3c468d1fe54e238840773fdf3629ae3baddab2b" Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.506632 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jhtzh"] Dec 05 06:59:35 crc kubenswrapper[4863]: I1205 06:59:35.512379 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jhtzh"] Dec 05 06:59:36 crc kubenswrapper[4863]: I1205 06:59:36.614024 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="409adf73-2361-40af-be87-7b4c283b5540" path="/var/lib/kubelet/pods/409adf73-2361-40af-be87-7b4c283b5540/volumes" Dec 05 06:59:38 crc kubenswrapper[4863]: I1205 06:59:38.464067 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:59:38 crc kubenswrapper[4863]: I1205 06:59:38.464157 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:59:38 crc kubenswrapper[4863]: I1205 06:59:38.464219 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 06:59:38 crc kubenswrapper[4863]: I1205 06:59:38.465038 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b8c26117c21159b1d8cea8a3a8c1b0a943220d2e10ef93da3165363f83cb28c0"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:59:38 crc kubenswrapper[4863]: I1205 06:59:38.465135 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://b8c26117c21159b1d8cea8a3a8c1b0a943220d2e10ef93da3165363f83cb28c0" gracePeriod=600 Dec 05 06:59:39 crc kubenswrapper[4863]: I1205 06:59:39.496842 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="b8c26117c21159b1d8cea8a3a8c1b0a943220d2e10ef93da3165363f83cb28c0" exitCode=0 Dec 05 06:59:39 crc kubenswrapper[4863]: I1205 06:59:39.496946 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"b8c26117c21159b1d8cea8a3a8c1b0a943220d2e10ef93da3165363f83cb28c0"} Dec 05 06:59:39 crc kubenswrapper[4863]: I1205 06:59:39.497351 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"d093d3ecafe6d3105031034151698dce338e7ba21ad94081ceea3ba6f6c0fcd4"} Dec 05 06:59:39 crc kubenswrapper[4863]: I1205 06:59:39.497389 4863 scope.go:117] "RemoveContainer" containerID="3639bef17310673572cc540c397be13fa9e8e790df5aefdf8ed326f56892e55b" Dec 05 06:59:46 crc kubenswrapper[4863]: I1205 06:59:46.601035 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:46 crc kubenswrapper[4863]: I1205 06:59:46.602640 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:46 crc kubenswrapper[4863]: I1205 06:59:46.845240 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-46kll"] Dec 05 06:59:47 crc kubenswrapper[4863]: I1205 06:59:47.560213 4863 generic.go:334] "Generic (PLEG): container finished" podID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerID="c72b9acc98ed03dcffb2855742762f8376332a95fbc234da0d34129f71dae215" exitCode=0 Dec 05 06:59:47 crc kubenswrapper[4863]: I1205 06:59:47.560301 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46kll" event={"ID":"c6c3cc6f-47cd-484a-a045-07ca0f492121","Type":"ContainerDied","Data":"c72b9acc98ed03dcffb2855742762f8376332a95fbc234da0d34129f71dae215"} Dec 05 06:59:47 crc kubenswrapper[4863]: I1205 06:59:47.560542 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46kll" event={"ID":"c6c3cc6f-47cd-484a-a045-07ca0f492121","Type":"ContainerStarted","Data":"5ab32cac844ca7ffe788939b3dc36b78b0bfa1c11779b151109873089bdaa961"} Dec 05 06:59:47 crc kubenswrapper[4863]: I1205 06:59:47.602088 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:47 crc kubenswrapper[4863]: I1205 06:59:47.603043 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:47 crc kubenswrapper[4863]: I1205 06:59:47.861871 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5sdgn"] Dec 05 06:59:48 crc kubenswrapper[4863]: I1205 06:59:48.566965 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5sdgn" event={"ID":"460d9ee9-73c2-4b9e-9056-4ff13b350e64","Type":"ContainerStarted","Data":"db6308c3660f15e001e020085f65c138de3f09fe08144825df445ef536fc2a07"} Dec 05 06:59:48 crc kubenswrapper[4863]: I1205 06:59:48.568960 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46kll" event={"ID":"c6c3cc6f-47cd-484a-a045-07ca0f492121","Type":"ContainerStarted","Data":"e78d6497ac7eaae12055098590cf9066d16d8c4c515535c595b19ba486708fb4"} Dec 05 06:59:49 crc kubenswrapper[4863]: I1205 06:59:49.580028 4863 generic.go:334] "Generic (PLEG): container finished" podID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerID="e78d6497ac7eaae12055098590cf9066d16d8c4c515535c595b19ba486708fb4" exitCode=0 Dec 05 06:59:49 crc kubenswrapper[4863]: I1205 06:59:49.580126 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46kll" event={"ID":"c6c3cc6f-47cd-484a-a045-07ca0f492121","Type":"ContainerDied","Data":"e78d6497ac7eaae12055098590cf9066d16d8c4c515535c595b19ba486708fb4"} Dec 05 06:59:49 crc kubenswrapper[4863]: I1205 06:59:49.587110 4863 generic.go:334] "Generic (PLEG): container finished" podID="460d9ee9-73c2-4b9e-9056-4ff13b350e64" containerID="4311b5859a4cf10ce2f96dd92ed9ea11a86b3f8963046c1d92a718a1b11a058e" exitCode=0 Dec 05 06:59:49 crc kubenswrapper[4863]: I1205 06:59:49.587162 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5sdgn" event={"ID":"460d9ee9-73c2-4b9e-9056-4ff13b350e64","Type":"ContainerDied","Data":"4311b5859a4cf10ce2f96dd92ed9ea11a86b3f8963046c1d92a718a1b11a058e"} Dec 05 06:59:50 crc kubenswrapper[4863]: I1205 06:59:50.597582 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46kll" event={"ID":"c6c3cc6f-47cd-484a-a045-07ca0f492121","Type":"ContainerStarted","Data":"dbc3dcc2eb0d6421cdee560a8fc5f7908d89ac93af270b4a7181672d27ec020e"} Dec 05 06:59:50 crc kubenswrapper[4863]: I1205 06:59:50.630027 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-46kll" podStartSLOduration=15.218827769 podStartE2EDuration="17.630001646s" podCreationTimestamp="2025-12-05 06:59:33 +0000 UTC" firstStartedPulling="2025-12-05 06:59:47.562726971 +0000 UTC m=+815.288724051" lastFinishedPulling="2025-12-05 06:59:49.973900848 +0000 UTC m=+817.699897928" observedRunningTime="2025-12-05 06:59:50.620505957 +0000 UTC m=+818.346503037" watchObservedRunningTime="2025-12-05 06:59:50.630001646 +0000 UTC m=+818.355998736" Dec 05 06:59:50 crc kubenswrapper[4863]: I1205 06:59:50.937915 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.055452 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/460d9ee9-73c2-4b9e-9056-4ff13b350e64-node-mnt\") pod \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.055578 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/460d9ee9-73c2-4b9e-9056-4ff13b350e64-crc-storage\") pod \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.055663 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q29tf\" (UniqueName: \"kubernetes.io/projected/460d9ee9-73c2-4b9e-9056-4ff13b350e64-kube-api-access-q29tf\") pod \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\" (UID: \"460d9ee9-73c2-4b9e-9056-4ff13b350e64\") " Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.055665 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/460d9ee9-73c2-4b9e-9056-4ff13b350e64-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "460d9ee9-73c2-4b9e-9056-4ff13b350e64" (UID: "460d9ee9-73c2-4b9e-9056-4ff13b350e64"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.055901 4863 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/460d9ee9-73c2-4b9e-9056-4ff13b350e64-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.062745 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/460d9ee9-73c2-4b9e-9056-4ff13b350e64-kube-api-access-q29tf" (OuterVolumeSpecName: "kube-api-access-q29tf") pod "460d9ee9-73c2-4b9e-9056-4ff13b350e64" (UID: "460d9ee9-73c2-4b9e-9056-4ff13b350e64"). InnerVolumeSpecName "kube-api-access-q29tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.076372 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/460d9ee9-73c2-4b9e-9056-4ff13b350e64-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "460d9ee9-73c2-4b9e-9056-4ff13b350e64" (UID: "460d9ee9-73c2-4b9e-9056-4ff13b350e64"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.157282 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q29tf\" (UniqueName: \"kubernetes.io/projected/460d9ee9-73c2-4b9e-9056-4ff13b350e64-kube-api-access-q29tf\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.157543 4863 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/460d9ee9-73c2-4b9e-9056-4ff13b350e64-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.606719 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5sdgn" Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.606718 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5sdgn" event={"ID":"460d9ee9-73c2-4b9e-9056-4ff13b350e64","Type":"ContainerDied","Data":"db6308c3660f15e001e020085f65c138de3f09fe08144825df445ef536fc2a07"} Dec 05 06:59:51 crc kubenswrapper[4863]: I1205 06:59:51.606781 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db6308c3660f15e001e020085f65c138de3f09fe08144825df445ef536fc2a07" Dec 05 06:59:54 crc kubenswrapper[4863]: I1205 06:59:54.005989 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:54 crc kubenswrapper[4863]: I1205 06:59:54.007671 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 06:59:55 crc kubenswrapper[4863]: I1205 06:59:55.069385 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-46kll" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="registry-server" probeResult="failure" output=< Dec 05 06:59:55 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 06:59:55 crc kubenswrapper[4863]: > Dec 05 06:59:56 crc kubenswrapper[4863]: I1205 06:59:56.290277 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q6wt9" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.872171 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7"] Dec 05 06:59:58 crc kubenswrapper[4863]: E1205 06:59:58.872801 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="460d9ee9-73c2-4b9e-9056-4ff13b350e64" containerName="storage" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.872816 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="460d9ee9-73c2-4b9e-9056-4ff13b350e64" containerName="storage" Dec 05 06:59:58 crc kubenswrapper[4863]: E1205 06:59:58.872829 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="extract-utilities" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.872837 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="extract-utilities" Dec 05 06:59:58 crc kubenswrapper[4863]: E1205 06:59:58.872852 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="registry-server" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.872862 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="registry-server" Dec 05 06:59:58 crc kubenswrapper[4863]: E1205 06:59:58.872873 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="extract-content" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.872881 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="extract-content" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.872982 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="409adf73-2361-40af-be87-7b4c283b5540" containerName="registry-server" 
Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.872998 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="460d9ee9-73c2-4b9e-9056-4ff13b350e64" containerName="storage" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.873831 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.876378 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.895094 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7"] Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.966513 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz9x4\" (UniqueName: \"kubernetes.io/projected/a89bb643-8e8e-4e92-9faf-e3a114c3d070-kube-api-access-jz9x4\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.966589 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:58 crc kubenswrapper[4863]: I1205 06:59:58.966659 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.068359 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.068609 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.069359 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " 
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.069419 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.069565 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz9x4\" (UniqueName: \"kubernetes.io/projected/a89bb643-8e8e-4e92-9faf-e3a114c3d070-kube-api-access-jz9x4\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.105423 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz9x4\" (UniqueName: \"kubernetes.io/projected/a89bb643-8e8e-4e92-9faf-e3a114c3d070-kube-api-access-jz9x4\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.195727 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.469870 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7"] Dec 05 06:59:59 crc kubenswrapper[4863]: W1205 06:59:59.475951 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda89bb643_8e8e_4e92_9faf_e3a114c3d070.slice/crio-3ae180a75f70ee0be71b5f19d797e340337ac84f53fac916e06596fbd8d70bf4 WatchSource:0}: Error finding container 3ae180a75f70ee0be71b5f19d797e340337ac84f53fac916e06596fbd8d70bf4: Status 404 returned error can't find the container with id 3ae180a75f70ee0be71b5f19d797e340337ac84f53fac916e06596fbd8d70bf4 Dec 05 06:59:59 crc kubenswrapper[4863]: I1205 06:59:59.656003 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" event={"ID":"a89bb643-8e8e-4e92-9faf-e3a114c3d070","Type":"ContainerStarted","Data":"3ae180a75f70ee0be71b5f19d797e340337ac84f53fac916e06596fbd8d70bf4"} Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.178967 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh"] Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.180281 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.183116 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.183424 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.212793 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh"] Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.293606 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzp4b\" (UniqueName: \"kubernetes.io/projected/83505046-0a0e-45a7-9b31-ba9854f03e00-kube-api-access-fzp4b\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.293881 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83505046-0a0e-45a7-9b31-ba9854f03e00-config-volume\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.294010 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83505046-0a0e-45a7-9b31-ba9854f03e00-secret-volume\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.395281 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzp4b\" (UniqueName: \"kubernetes.io/projected/83505046-0a0e-45a7-9b31-ba9854f03e00-kube-api-access-fzp4b\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.395346 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83505046-0a0e-45a7-9b31-ba9854f03e00-config-volume\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.395455 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83505046-0a0e-45a7-9b31-ba9854f03e00-secret-volume\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.397725 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83505046-0a0e-45a7-9b31-ba9854f03e00-config-volume\") pod 
\"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.406308 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83505046-0a0e-45a7-9b31-ba9854f03e00-secret-volume\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.418617 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzp4b\" (UniqueName: \"kubernetes.io/projected/83505046-0a0e-45a7-9b31-ba9854f03e00-kube-api-access-fzp4b\") pod \"collect-profiles-29415300-b4tdh\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.505489 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.677570 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" event={"ID":"a89bb643-8e8e-4e92-9faf-e3a114c3d070","Type":"ContainerStarted","Data":"2f96d27c587005f150bd9b5db3e43b1e9528869d1cf245b4d9ede6208b004889"} Dec 05 07:00:00 crc kubenswrapper[4863]: I1205 07:00:00.990737 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh"] Dec 05 07:00:01 crc kubenswrapper[4863]: I1205 07:00:01.687605 4863 generic.go:334] "Generic (PLEG): container finished" podID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerID="2f96d27c587005f150bd9b5db3e43b1e9528869d1cf245b4d9ede6208b004889" exitCode=0 Dec 05 07:00:01 crc kubenswrapper[4863]: I1205 07:00:01.687723 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" event={"ID":"a89bb643-8e8e-4e92-9faf-e3a114c3d070","Type":"ContainerDied","Data":"2f96d27c587005f150bd9b5db3e43b1e9528869d1cf245b4d9ede6208b004889"} Dec 05 07:00:01 crc kubenswrapper[4863]: I1205 07:00:01.691140 4863 generic.go:334] "Generic (PLEG): container finished" podID="83505046-0a0e-45a7-9b31-ba9854f03e00" containerID="1165bf6fe410c69e74b802bdee134934a25c0e41f0c35d72251eda196de0df35" exitCode=0 Dec 05 07:00:01 crc kubenswrapper[4863]: I1205 07:00:01.691262 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" event={"ID":"83505046-0a0e-45a7-9b31-ba9854f03e00","Type":"ContainerDied","Data":"1165bf6fe410c69e74b802bdee134934a25c0e41f0c35d72251eda196de0df35"} Dec 05 07:00:01 crc kubenswrapper[4863]: I1205 07:00:01.691675 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" event={"ID":"83505046-0a0e-45a7-9b31-ba9854f03e00","Type":"ContainerStarted","Data":"a722678df8568ccf38395475df05049942f9da57aa9c794f79f67e76f3638cd8"} Dec 05 07:00:02 crc kubenswrapper[4863]: I1205 07:00:02.992835 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.133046 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83505046-0a0e-45a7-9b31-ba9854f03e00-config-volume\") pod \"83505046-0a0e-45a7-9b31-ba9854f03e00\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.133382 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzp4b\" (UniqueName: \"kubernetes.io/projected/83505046-0a0e-45a7-9b31-ba9854f03e00-kube-api-access-fzp4b\") pod \"83505046-0a0e-45a7-9b31-ba9854f03e00\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.133420 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83505046-0a0e-45a7-9b31-ba9854f03e00-secret-volume\") pod \"83505046-0a0e-45a7-9b31-ba9854f03e00\" (UID: \"83505046-0a0e-45a7-9b31-ba9854f03e00\") " Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.135034 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83505046-0a0e-45a7-9b31-ba9854f03e00-config-volume" (OuterVolumeSpecName: "config-volume") pod "83505046-0a0e-45a7-9b31-ba9854f03e00" (UID: "83505046-0a0e-45a7-9b31-ba9854f03e00"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.141304 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83505046-0a0e-45a7-9b31-ba9854f03e00-kube-api-access-fzp4b" (OuterVolumeSpecName: "kube-api-access-fzp4b") pod "83505046-0a0e-45a7-9b31-ba9854f03e00" (UID: "83505046-0a0e-45a7-9b31-ba9854f03e00"). InnerVolumeSpecName "kube-api-access-fzp4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.141404 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83505046-0a0e-45a7-9b31-ba9854f03e00-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "83505046-0a0e-45a7-9b31-ba9854f03e00" (UID: "83505046-0a0e-45a7-9b31-ba9854f03e00"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.234880 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83505046-0a0e-45a7-9b31-ba9854f03e00-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.234921 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83505046-0a0e-45a7-9b31-ba9854f03e00-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.234931 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzp4b\" (UniqueName: \"kubernetes.io/projected/83505046-0a0e-45a7-9b31-ba9854f03e00-kube-api-access-fzp4b\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.710355 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" event={"ID":"83505046-0a0e-45a7-9b31-ba9854f03e00","Type":"ContainerDied","Data":"a722678df8568ccf38395475df05049942f9da57aa9c794f79f67e76f3638cd8"} Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.710412 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a722678df8568ccf38395475df05049942f9da57aa9c794f79f67e76f3638cd8" Dec 05 07:00:03 crc kubenswrapper[4863]: I1205 07:00:03.710439 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh" Dec 05 07:00:04 crc kubenswrapper[4863]: I1205 07:00:04.076398 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 07:00:04 crc kubenswrapper[4863]: I1205 07:00:04.140375 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 07:00:06 crc kubenswrapper[4863]: I1205 07:00:06.149786 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-46kll"] Dec 05 07:00:06 crc kubenswrapper[4863]: I1205 07:00:06.150769 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-46kll" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="registry-server" containerID="cri-o://dbc3dcc2eb0d6421cdee560a8fc5f7908d89ac93af270b4a7181672d27ec020e" gracePeriod=2 Dec 05 07:00:06 crc kubenswrapper[4863]: I1205 07:00:06.735322 4863 generic.go:334] "Generic (PLEG): container finished" podID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerID="dbc3dcc2eb0d6421cdee560a8fc5f7908d89ac93af270b4a7181672d27ec020e" exitCode=0 Dec 05 07:00:06 crc kubenswrapper[4863]: I1205 07:00:06.735388 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46kll" event={"ID":"c6c3cc6f-47cd-484a-a045-07ca0f492121","Type":"ContainerDied","Data":"dbc3dcc2eb0d6421cdee560a8fc5f7908d89ac93af270b4a7181672d27ec020e"} Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.087250 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.200764 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-catalog-content\") pod \"c6c3cc6f-47cd-484a-a045-07ca0f492121\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.200885 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4shxs\" (UniqueName: \"kubernetes.io/projected/c6c3cc6f-47cd-484a-a045-07ca0f492121-kube-api-access-4shxs\") pod \"c6c3cc6f-47cd-484a-a045-07ca0f492121\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.201005 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-utilities\") pod \"c6c3cc6f-47cd-484a-a045-07ca0f492121\" (UID: \"c6c3cc6f-47cd-484a-a045-07ca0f492121\") " Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.203065 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-utilities" (OuterVolumeSpecName: "utilities") pod "c6c3cc6f-47cd-484a-a045-07ca0f492121" (UID: "c6c3cc6f-47cd-484a-a045-07ca0f492121"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.209586 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6c3cc6f-47cd-484a-a045-07ca0f492121-kube-api-access-4shxs" (OuterVolumeSpecName: "kube-api-access-4shxs") pod "c6c3cc6f-47cd-484a-a045-07ca0f492121" (UID: "c6c3cc6f-47cd-484a-a045-07ca0f492121"). InnerVolumeSpecName "kube-api-access-4shxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.302384 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.302420 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4shxs\" (UniqueName: \"kubernetes.io/projected/c6c3cc6f-47cd-484a-a045-07ca0f492121-kube-api-access-4shxs\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.358715 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6c3cc6f-47cd-484a-a045-07ca0f492121" (UID: "c6c3cc6f-47cd-484a-a045-07ca0f492121"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.403686 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c3cc6f-47cd-484a-a045-07ca0f492121-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.745734 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46kll" event={"ID":"c6c3cc6f-47cd-484a-a045-07ca0f492121","Type":"ContainerDied","Data":"5ab32cac844ca7ffe788939b3dc36b78b0bfa1c11779b151109873089bdaa961"} Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.745800 4863 scope.go:117] "RemoveContainer" containerID="dbc3dcc2eb0d6421cdee560a8fc5f7908d89ac93af270b4a7181672d27ec020e" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.745958 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46kll" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.778649 4863 scope.go:117] "RemoveContainer" containerID="e78d6497ac7eaae12055098590cf9066d16d8c4c515535c595b19ba486708fb4" Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.798272 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-46kll"] Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.806565 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-46kll"] Dec 05 07:00:07 crc kubenswrapper[4863]: I1205 07:00:07.814211 4863 scope.go:117] "RemoveContainer" containerID="c72b9acc98ed03dcffb2855742762f8376332a95fbc234da0d34129f71dae215" Dec 05 07:00:08 crc kubenswrapper[4863]: I1205 07:00:08.613200 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" path="/var/lib/kubelet/pods/c6c3cc6f-47cd-484a-a045-07ca0f492121/volumes" Dec 05 07:00:12 crc kubenswrapper[4863]: I1205 07:00:12.784044 4863 generic.go:334] "Generic (PLEG): container finished" podID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerID="53098061858c1551eb3a2b5dcd5abd3df9ef46dab7419259e90f1bb715f0c648" exitCode=0 Dec 05 07:00:12 crc kubenswrapper[4863]: I1205 07:00:12.784140 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" event={"ID":"a89bb643-8e8e-4e92-9faf-e3a114c3d070","Type":"ContainerDied","Data":"53098061858c1551eb3a2b5dcd5abd3df9ef46dab7419259e90f1bb715f0c648"} Dec 05 07:00:13 crc kubenswrapper[4863]: I1205 07:00:13.792663 4863 generic.go:334] "Generic (PLEG): container finished" podID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerID="d7ad48c878c48fa32b7c787a459b553649280aa39d388641a67d083c3ce51284" exitCode=0 Dec 05 07:00:13 crc kubenswrapper[4863]: I1205 07:00:13.792738 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" event={"ID":"a89bb643-8e8e-4e92-9faf-e3a114c3d070","Type":"ContainerDied","Data":"d7ad48c878c48fa32b7c787a459b553649280aa39d388641a67d083c3ce51284"} Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.097943 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.204903 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-util\") pod \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.205121 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-bundle\") pod \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.205209 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz9x4\" (UniqueName: \"kubernetes.io/projected/a89bb643-8e8e-4e92-9faf-e3a114c3d070-kube-api-access-jz9x4\") pod \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\" (UID: \"a89bb643-8e8e-4e92-9faf-e3a114c3d070\") " Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.206133 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-bundle" (OuterVolumeSpecName: "bundle") pod "a89bb643-8e8e-4e92-9faf-e3a114c3d070" (UID: "a89bb643-8e8e-4e92-9faf-e3a114c3d070"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.213727 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a89bb643-8e8e-4e92-9faf-e3a114c3d070-kube-api-access-jz9x4" (OuterVolumeSpecName: "kube-api-access-jz9x4") pod "a89bb643-8e8e-4e92-9faf-e3a114c3d070" (UID: "a89bb643-8e8e-4e92-9faf-e3a114c3d070"). InnerVolumeSpecName "kube-api-access-jz9x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.224409 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-util" (OuterVolumeSpecName: "util") pod "a89bb643-8e8e-4e92-9faf-e3a114c3d070" (UID: "a89bb643-8e8e-4e92-9faf-e3a114c3d070"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.306938 4863 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-util\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.306995 4863 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a89bb643-8e8e-4e92-9faf-e3a114c3d070-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.307016 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz9x4\" (UniqueName: \"kubernetes.io/projected/a89bb643-8e8e-4e92-9faf-e3a114c3d070-kube-api-access-jz9x4\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.813764 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" event={"ID":"a89bb643-8e8e-4e92-9faf-e3a114c3d070","Type":"ContainerDied","Data":"3ae180a75f70ee0be71b5f19d797e340337ac84f53fac916e06596fbd8d70bf4"} Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.813833 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ae180a75f70ee0be71b5f19d797e340337ac84f53fac916e06596fbd8d70bf4" Dec 05 07:00:15 crc kubenswrapper[4863]: I1205 07:00:15.813859 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.585384 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n"] Dec 05 07:00:20 crc kubenswrapper[4863]: E1205 07:00:20.586354 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="registry-server" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586376 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="registry-server" Dec 05 07:00:20 crc kubenswrapper[4863]: E1205 07:00:20.586388 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83505046-0a0e-45a7-9b31-ba9854f03e00" containerName="collect-profiles" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586396 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="83505046-0a0e-45a7-9b31-ba9854f03e00" containerName="collect-profiles" Dec 05 07:00:20 crc kubenswrapper[4863]: E1205 07:00:20.586410 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerName="extract" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586421 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerName="extract" Dec 05 07:00:20 crc kubenswrapper[4863]: E1205 07:00:20.586431 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerName="util" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586438 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerName="util" Dec 05 07:00:20 crc kubenswrapper[4863]: E1205 07:00:20.586451 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" 
containerName="extract-utilities" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586459 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="extract-utilities" Dec 05 07:00:20 crc kubenswrapper[4863]: E1205 07:00:20.586498 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="extract-content" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586510 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="extract-content" Dec 05 07:00:20 crc kubenswrapper[4863]: E1205 07:00:20.586524 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerName="pull" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586531 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerName="pull" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586663 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="83505046-0a0e-45a7-9b31-ba9854f03e00" containerName="collect-profiles" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586680 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a89bb643-8e8e-4e92-9faf-e3a114c3d070" containerName="extract" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.586699 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6c3cc6f-47cd-484a-a045-07ca0f492121" containerName="registry-server" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.587256 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.589887 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.589907 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.590389 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-dxvdc" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.595782 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n"] Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.683046 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg76l\" (UniqueName: \"kubernetes.io/projected/fa697669-6ae2-4536-aaf9-77d05ac2251e-kube-api-access-tg76l\") pod \"nmstate-operator-5b5b58f5c8-48k4n\" (UID: \"fa697669-6ae2-4536-aaf9-77d05ac2251e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.784740 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg76l\" (UniqueName: \"kubernetes.io/projected/fa697669-6ae2-4536-aaf9-77d05ac2251e-kube-api-access-tg76l\") pod \"nmstate-operator-5b5b58f5c8-48k4n\" (UID: \"fa697669-6ae2-4536-aaf9-77d05ac2251e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.807380 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg76l\" (UniqueName: 
\"kubernetes.io/projected/fa697669-6ae2-4536-aaf9-77d05ac2251e-kube-api-access-tg76l\") pod \"nmstate-operator-5b5b58f5c8-48k4n\" (UID: \"fa697669-6ae2-4536-aaf9-77d05ac2251e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" Dec 05 07:00:20 crc kubenswrapper[4863]: I1205 07:00:20.915494 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" Dec 05 07:00:21 crc kubenswrapper[4863]: I1205 07:00:21.163592 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n"] Dec 05 07:00:21 crc kubenswrapper[4863]: I1205 07:00:21.851903 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" event={"ID":"fa697669-6ae2-4536-aaf9-77d05ac2251e","Type":"ContainerStarted","Data":"5b67474dda74328d0577309a13dfc2ef8f96f1da26377c11c2431be6b08e6971"} Dec 05 07:00:23 crc kubenswrapper[4863]: I1205 07:00:23.882797 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" event={"ID":"fa697669-6ae2-4536-aaf9-77d05ac2251e","Type":"ContainerStarted","Data":"17381fe89ed9b4a02fd72c41be4823190835d5f4fc3a68cf17c9cf64e2ea3d58"} Dec 05 07:00:23 crc kubenswrapper[4863]: I1205 07:00:23.907783 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-48k4n" podStartSLOduration=1.755799529 podStartE2EDuration="3.907755153s" podCreationTimestamp="2025-12-05 07:00:20 +0000 UTC" firstStartedPulling="2025-12-05 07:00:21.183782054 +0000 UTC m=+848.909779084" lastFinishedPulling="2025-12-05 07:00:23.335737668 +0000 UTC m=+851.061734708" observedRunningTime="2025-12-05 07:00:23.90426295 +0000 UTC m=+851.630260020" watchObservedRunningTime="2025-12-05 07:00:23.907755153 +0000 UTC m=+851.633752233" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.619057 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.622290 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.628805 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-k9l55" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.631271 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.632177 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.638279 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.640926 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.648832 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.660984 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-q9rlj"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.662650 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.715773 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7688\" (UniqueName: \"kubernetes.io/projected/c68996bf-ad3d-4133-8435-f79683625548-kube-api-access-l7688\") pod \"nmstate-metrics-7f946cbc9-9b5nh\" (UID: \"c68996bf-ad3d-4133-8435-f79683625548\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.715813 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7816923d-7529-4ac1-830f-0bba65fe40f5-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pfmd9\" (UID: \"7816923d-7529-4ac1-830f-0bba65fe40f5\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.715874 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjv5p\" (UniqueName: \"kubernetes.io/projected/7816923d-7529-4ac1-830f-0bba65fe40f5-kube-api-access-kjv5p\") pod \"nmstate-webhook-5f6d4c5ccb-pfmd9\" (UID: \"7816923d-7529-4ac1-830f-0bba65fe40f5\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.756949 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.757685 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.759193 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.759614 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.790264 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-xd8tq" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.817532 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjv5p\" (UniqueName: \"kubernetes.io/projected/7816923d-7529-4ac1-830f-0bba65fe40f5-kube-api-access-kjv5p\") pod \"nmstate-webhook-5f6d4c5ccb-pfmd9\" (UID: \"7816923d-7529-4ac1-830f-0bba65fe40f5\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.817604 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-nmstate-lock\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.817639 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-ovs-socket\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.817696 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2gnp\" (UniqueName: \"kubernetes.io/projected/55fadd2e-07b7-426a-8bb8-697beb4c6209-kube-api-access-m2gnp\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.817762 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-dbus-socket\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.817787 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7816923d-7529-4ac1-830f-0bba65fe40f5-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pfmd9\" (UID: \"7816923d-7529-4ac1-830f-0bba65fe40f5\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.817811 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7688\" (UniqueName: \"kubernetes.io/projected/c68996bf-ad3d-4133-8435-f79683625548-kube-api-access-l7688\") pod \"nmstate-metrics-7f946cbc9-9b5nh\" (UID: \"c68996bf-ad3d-4133-8435-f79683625548\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" Dec 05 07:00:29 crc kubenswrapper[4863]: E1205 07:00:29.817943 4863 secret.go:188] Couldn't get secret 
openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 05 07:00:29 crc kubenswrapper[4863]: E1205 07:00:29.818008 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7816923d-7529-4ac1-830f-0bba65fe40f5-tls-key-pair podName:7816923d-7529-4ac1-830f-0bba65fe40f5 nodeName:}" failed. No retries permitted until 2025-12-05 07:00:30.317989219 +0000 UTC m=+858.043986259 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/7816923d-7529-4ac1-830f-0bba65fe40f5-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-pfmd9" (UID: "7816923d-7529-4ac1-830f-0bba65fe40f5") : secret "openshift-nmstate-webhook" not found Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.821143 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.842329 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjv5p\" (UniqueName: \"kubernetes.io/projected/7816923d-7529-4ac1-830f-0bba65fe40f5-kube-api-access-kjv5p\") pod \"nmstate-webhook-5f6d4c5ccb-pfmd9\" (UID: \"7816923d-7529-4ac1-830f-0bba65fe40f5\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.846102 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7688\" (UniqueName: \"kubernetes.io/projected/c68996bf-ad3d-4133-8435-f79683625548-kube-api-access-l7688\") pod \"nmstate-metrics-7f946cbc9-9b5nh\" (UID: \"c68996bf-ad3d-4133-8435-f79683625548\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919264 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-dbus-socket\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919354 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0d8503fb-1cdd-41ce-94a9-916f603dd90d-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919377 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-nmstate-lock\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919398 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d8503fb-1cdd-41ce-94a9-916f603dd90d-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919429 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: 
\"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-ovs-socket\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919504 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2gnp\" (UniqueName: \"kubernetes.io/projected/55fadd2e-07b7-426a-8bb8-697beb4c6209-kube-api-access-m2gnp\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919533 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4xrv\" (UniqueName: \"kubernetes.io/projected/0d8503fb-1cdd-41ce-94a9-916f603dd90d-kube-api-access-m4xrv\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919859 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-nmstate-lock\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919882 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-dbus-socket\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.919928 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/55fadd2e-07b7-426a-8bb8-697beb4c6209-ovs-socket\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.939181 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6b59954f46-zrbxm"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.939980 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.940328 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2gnp\" (UniqueName: \"kubernetes.io/projected/55fadd2e-07b7-426a-8bb8-697beb4c6209-kube-api-access-m2gnp\") pod \"nmstate-handler-q9rlj\" (UID: \"55fadd2e-07b7-426a-8bb8-697beb4c6209\") " pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.954067 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.957684 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6b59954f46-zrbxm"] Dec 05 07:00:29 crc kubenswrapper[4863]: I1205 07:00:29.986056 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020334 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxgrw\" (UniqueName: \"kubernetes.io/projected/35ffd6fe-f10b-4025-9339-2c279b2fbccb-kube-api-access-rxgrw\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020579 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-trusted-ca-bundle\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020674 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-serving-cert\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020705 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-oauth-config\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020751 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-service-ca\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020830 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0d8503fb-1cdd-41ce-94a9-916f603dd90d-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020861 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d8503fb-1cdd-41ce-94a9-916f603dd90d-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020938 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-config\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.020992 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-oauth-serving-cert\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.021035 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4xrv\" (UniqueName: \"kubernetes.io/projected/0d8503fb-1cdd-41ce-94a9-916f603dd90d-kube-api-access-m4xrv\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.021811 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0d8503fb-1cdd-41ce-94a9-916f603dd90d-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.026824 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d8503fb-1cdd-41ce-94a9-916f603dd90d-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.043060 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4xrv\" (UniqueName: \"kubernetes.io/projected/0d8503fb-1cdd-41ce-94a9-916f603dd90d-kube-api-access-m4xrv\") pod \"nmstate-console-plugin-7fbb5f6569-97tqb\" (UID: \"0d8503fb-1cdd-41ce-94a9-916f603dd90d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.106036 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122075 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-config\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122132 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-oauth-serving-cert\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122192 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxgrw\" (UniqueName: \"kubernetes.io/projected/35ffd6fe-f10b-4025-9339-2c279b2fbccb-kube-api-access-rxgrw\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122218 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-trusted-ca-bundle\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122273 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-serving-cert\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122305 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-service-ca\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122327 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-oauth-config\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.122888 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-config\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.123796 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-trusted-ca-bundle\") pod \"console-6b59954f46-zrbxm\" (UID: 
\"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.124311 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-service-ca\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.125542 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-oauth-config\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.126064 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35ffd6fe-f10b-4025-9339-2c279b2fbccb-console-serving-cert\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.128482 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35ffd6fe-f10b-4025-9339-2c279b2fbccb-oauth-serving-cert\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.140354 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh"] Dec 05 07:00:30 crc kubenswrapper[4863]: W1205 07:00:30.140766 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc68996bf_ad3d_4133_8435_f79683625548.slice/crio-a1f306b7a8340eb5271bca7ab826cc435df1e62f4445a97938aeb5e27c438061 WatchSource:0}: Error finding container a1f306b7a8340eb5271bca7ab826cc435df1e62f4445a97938aeb5e27c438061: Status 404 returned error can't find the container with id a1f306b7a8340eb5271bca7ab826cc435df1e62f4445a97938aeb5e27c438061 Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.145004 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxgrw\" (UniqueName: \"kubernetes.io/projected/35ffd6fe-f10b-4025-9339-2c279b2fbccb-kube-api-access-rxgrw\") pod \"console-6b59954f46-zrbxm\" (UID: \"35ffd6fe-f10b-4025-9339-2c279b2fbccb\") " pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.266960 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.296974 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb"] Dec 05 07:00:30 crc kubenswrapper[4863]: W1205 07:00:30.299819 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d8503fb_1cdd_41ce_94a9_916f603dd90d.slice/crio-35ce9037c72dabc292321b5e72ca9febe971726b351edc8934b18004bac5777d WatchSource:0}: Error finding container 35ce9037c72dabc292321b5e72ca9febe971726b351edc8934b18004bac5777d: Status 404 returned error can't find the container with id 35ce9037c72dabc292321b5e72ca9febe971726b351edc8934b18004bac5777d Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.325941 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7816923d-7529-4ac1-830f-0bba65fe40f5-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pfmd9\" (UID: \"7816923d-7529-4ac1-830f-0bba65fe40f5\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.329076 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7816923d-7529-4ac1-830f-0bba65fe40f5-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pfmd9\" (UID: \"7816923d-7529-4ac1-830f-0bba65fe40f5\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.491962 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6b59954f46-zrbxm"] Dec 05 07:00:30 crc kubenswrapper[4863]: W1205 07:00:30.498686 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35ffd6fe_f10b_4025_9339_2c279b2fbccb.slice/crio-5f66293f6468e26a3ab130bb80f66d10fb077844857e9f6de024aa94c6421bc8 WatchSource:0}: Error finding container 5f66293f6468e26a3ab130bb80f66d10fb077844857e9f6de024aa94c6421bc8: Status 404 returned error can't find the container with id 5f66293f6468e26a3ab130bb80f66d10fb077844857e9f6de024aa94c6421bc8 Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.566559 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.783998 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9"] Dec 05 07:00:30 crc kubenswrapper[4863]: W1205 07:00:30.791866 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7816923d_7529_4ac1_830f_0bba65fe40f5.slice/crio-9c21e6e658162879562b1f7be52cfecffe45bc87d8d3d7f4ae3f5c7e7c00df5b WatchSource:0}: Error finding container 9c21e6e658162879562b1f7be52cfecffe45bc87d8d3d7f4ae3f5c7e7c00df5b: Status 404 returned error can't find the container with id 9c21e6e658162879562b1f7be52cfecffe45bc87d8d3d7f4ae3f5c7e7c00df5b Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.921280 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" event={"ID":"c68996bf-ad3d-4133-8435-f79683625548","Type":"ContainerStarted","Data":"a1f306b7a8340eb5271bca7ab826cc435df1e62f4445a97938aeb5e27c438061"} Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.922724 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6b59954f46-zrbxm" event={"ID":"35ffd6fe-f10b-4025-9339-2c279b2fbccb","Type":"ContainerStarted","Data":"dc4a84e205cf9e489b8763705225307ef3e09b60065cb72df9658357f3ceb72e"} Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.922757 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6b59954f46-zrbxm" event={"ID":"35ffd6fe-f10b-4025-9339-2c279b2fbccb","Type":"ContainerStarted","Data":"5f66293f6468e26a3ab130bb80f66d10fb077844857e9f6de024aa94c6421bc8"} Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.924315 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" event={"ID":"7816923d-7529-4ac1-830f-0bba65fe40f5","Type":"ContainerStarted","Data":"9c21e6e658162879562b1f7be52cfecffe45bc87d8d3d7f4ae3f5c7e7c00df5b"} Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.925657 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" event={"ID":"0d8503fb-1cdd-41ce-94a9-916f603dd90d","Type":"ContainerStarted","Data":"35ce9037c72dabc292321b5e72ca9febe971726b351edc8934b18004bac5777d"} Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.926789 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-q9rlj" event={"ID":"55fadd2e-07b7-426a-8bb8-697beb4c6209","Type":"ContainerStarted","Data":"02f7bb3e2dcf16dd988a0dce6b2312ac950938bc5f65380d2b7b7b08886ebdd4"} Dec 05 07:00:30 crc kubenswrapper[4863]: I1205 07:00:30.950060 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6b59954f46-zrbxm" podStartSLOduration=1.950044161 podStartE2EDuration="1.950044161s" podCreationTimestamp="2025-12-05 07:00:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:00:30.939671702 +0000 UTC m=+858.665668762" watchObservedRunningTime="2025-12-05 07:00:30.950044161 +0000 UTC m=+858.676041191" Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.947728 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" 
event={"ID":"7816923d-7529-4ac1-830f-0bba65fe40f5","Type":"ContainerStarted","Data":"56104ff6b31ca8c0b04f3ee972fd34c2272787437195d3cb19afb2f2a7b86567"} Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.948512 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.950666 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" event={"ID":"0d8503fb-1cdd-41ce-94a9-916f603dd90d","Type":"ContainerStarted","Data":"03d557d30b28cd51d7181f7f62d9537678e5ed5ccc8bd328fc1b8ab2f228314c"} Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.955294 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-q9rlj" event={"ID":"55fadd2e-07b7-426a-8bb8-697beb4c6209","Type":"ContainerStarted","Data":"2729a73e7c9caf51c255e9bc494d6ed611202fb1ee3da59aa6541d3b60ce3bc4"} Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.955448 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.957138 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" event={"ID":"c68996bf-ad3d-4133-8435-f79683625548","Type":"ContainerStarted","Data":"09d93157853febd079e6f5cc2a1f145c8670b8fb783f72a0cd84e1fe681e0c54"} Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.973205 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" podStartSLOduration=2.9256371740000002 podStartE2EDuration="4.973186581s" podCreationTimestamp="2025-12-05 07:00:29 +0000 UTC" firstStartedPulling="2025-12-05 07:00:30.793773706 +0000 UTC m=+858.519770746" lastFinishedPulling="2025-12-05 07:00:32.841323103 +0000 UTC m=+860.567320153" observedRunningTime="2025-12-05 07:00:33.971097351 +0000 UTC m=+861.697094441" watchObservedRunningTime="2025-12-05 07:00:33.973186581 +0000 UTC m=+861.699183631" Dec 05 07:00:33 crc kubenswrapper[4863]: I1205 07:00:33.991549 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-q9rlj" podStartSLOduration=2.170147625 podStartE2EDuration="4.991526813s" podCreationTimestamp="2025-12-05 07:00:29 +0000 UTC" firstStartedPulling="2025-12-05 07:00:30.011944752 +0000 UTC m=+857.737941802" lastFinishedPulling="2025-12-05 07:00:32.83332395 +0000 UTC m=+860.559320990" observedRunningTime="2025-12-05 07:00:33.989365701 +0000 UTC m=+861.715362781" watchObservedRunningTime="2025-12-05 07:00:33.991526813 +0000 UTC m=+861.717523873" Dec 05 07:00:34 crc kubenswrapper[4863]: I1205 07:00:34.023555 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-97tqb" podStartSLOduration=2.485326522 podStartE2EDuration="5.023534504s" podCreationTimestamp="2025-12-05 07:00:29 +0000 UTC" firstStartedPulling="2025-12-05 07:00:30.301906331 +0000 UTC m=+858.027903371" lastFinishedPulling="2025-12-05 07:00:32.840114313 +0000 UTC m=+860.566111353" observedRunningTime="2025-12-05 07:00:34.016651198 +0000 UTC m=+861.742648248" watchObservedRunningTime="2025-12-05 07:00:34.023534504 +0000 UTC m=+861.749531544" Dec 05 07:00:35 crc kubenswrapper[4863]: I1205 07:00:35.970769 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" event={"ID":"c68996bf-ad3d-4133-8435-f79683625548","Type":"ContainerStarted","Data":"3ea558518d922d46fd7b82965a75bb8e561d3002d0b5801eefedd6ffae71285b"} Dec 05 07:00:35 crc kubenswrapper[4863]: I1205 07:00:35.989637 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9b5nh" podStartSLOduration=1.995005565 podStartE2EDuration="6.989621649s" podCreationTimestamp="2025-12-05 07:00:29 +0000 UTC" firstStartedPulling="2025-12-05 07:00:30.142696254 +0000 UTC m=+857.868693294" lastFinishedPulling="2025-12-05 07:00:35.137312338 +0000 UTC m=+862.863309378" observedRunningTime="2025-12-05 07:00:35.985861418 +0000 UTC m=+863.711858478" watchObservedRunningTime="2025-12-05 07:00:35.989621649 +0000 UTC m=+863.715618699" Dec 05 07:00:40 crc kubenswrapper[4863]: I1205 07:00:40.017003 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-q9rlj" Dec 05 07:00:40 crc kubenswrapper[4863]: I1205 07:00:40.267831 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:40 crc kubenswrapper[4863]: I1205 07:00:40.268398 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:40 crc kubenswrapper[4863]: I1205 07:00:40.278305 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:41 crc kubenswrapper[4863]: I1205 07:00:41.014153 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6b59954f46-zrbxm" Dec 05 07:00:41 crc kubenswrapper[4863]: I1205 07:00:41.081845 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wscq8"] Dec 05 07:00:50 crc kubenswrapper[4863]: I1205 07:00:50.575424 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pfmd9" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.411639 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r"] Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.413723 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.416642 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.418508 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r"] Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.527488 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.527643 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxg6t\" (UniqueName: \"kubernetes.io/projected/035ae227-1541-4b39-b3af-315e31a00f0c-kube-api-access-xxg6t\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.527689 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.628515 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.628595 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxg6t\" (UniqueName: \"kubernetes.io/projected/035ae227-1541-4b39-b3af-315e31a00f0c-kube-api-access-xxg6t\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.628624 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.629111 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.629112 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.651720 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxg6t\" (UniqueName: \"kubernetes.io/projected/035ae227-1541-4b39-b3af-315e31a00f0c-kube-api-access-xxg6t\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:04 crc kubenswrapper[4863]: I1205 07:01:04.735970 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:05 crc kubenswrapper[4863]: I1205 07:01:05.051666 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r"] Dec 05 07:01:05 crc kubenswrapper[4863]: I1205 07:01:05.156140 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" event={"ID":"035ae227-1541-4b39-b3af-315e31a00f0c","Type":"ContainerStarted","Data":"1bd507c4cad782f8bf99dd87fe47bada960a7c64299d0199171a2421b03c05e0"} Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.148794 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-wscq8" podUID="9e75585a-25ca-4d16-b2ca-33c520e209e1" containerName="console" containerID="cri-o://739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a" gracePeriod=15 Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.748803 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wscq8_9e75585a-25ca-4d16-b2ca-33c520e209e1/console/0.log" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.748872 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.858198 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-trusted-ca-bundle\") pod \"9e75585a-25ca-4d16-b2ca-33c520e209e1\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.858605 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-oauth-serving-cert\") pod \"9e75585a-25ca-4d16-b2ca-33c520e209e1\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.858690 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-oauth-config\") pod \"9e75585a-25ca-4d16-b2ca-33c520e209e1\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.858722 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-config\") pod \"9e75585a-25ca-4d16-b2ca-33c520e209e1\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.858763 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-serving-cert\") pod \"9e75585a-25ca-4d16-b2ca-33c520e209e1\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.858842 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-service-ca\") pod \"9e75585a-25ca-4d16-b2ca-33c520e209e1\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.858860 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxnmm\" (UniqueName: \"kubernetes.io/projected/9e75585a-25ca-4d16-b2ca-33c520e209e1-kube-api-access-hxnmm\") pod \"9e75585a-25ca-4d16-b2ca-33c520e209e1\" (UID: \"9e75585a-25ca-4d16-b2ca-33c520e209e1\") " Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.859203 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "9e75585a-25ca-4d16-b2ca-33c520e209e1" (UID: "9e75585a-25ca-4d16-b2ca-33c520e209e1"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.859645 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-config" (OuterVolumeSpecName: "console-config") pod "9e75585a-25ca-4d16-b2ca-33c520e209e1" (UID: "9e75585a-25ca-4d16-b2ca-33c520e209e1"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.859908 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "9e75585a-25ca-4d16-b2ca-33c520e209e1" (UID: "9e75585a-25ca-4d16-b2ca-33c520e209e1"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.859971 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-service-ca" (OuterVolumeSpecName: "service-ca") pod "9e75585a-25ca-4d16-b2ca-33c520e209e1" (UID: "9e75585a-25ca-4d16-b2ca-33c520e209e1"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.865028 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "9e75585a-25ca-4d16-b2ca-33c520e209e1" (UID: "9e75585a-25ca-4d16-b2ca-33c520e209e1"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.865759 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "9e75585a-25ca-4d16-b2ca-33c520e209e1" (UID: "9e75585a-25ca-4d16-b2ca-33c520e209e1"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.865761 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e75585a-25ca-4d16-b2ca-33c520e209e1-kube-api-access-hxnmm" (OuterVolumeSpecName: "kube-api-access-hxnmm") pod "9e75585a-25ca-4d16-b2ca-33c520e209e1" (UID: "9e75585a-25ca-4d16-b2ca-33c520e209e1"). InnerVolumeSpecName "kube-api-access-hxnmm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.960778 4863 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.960818 4863 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.960827 4863 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e75585a-25ca-4d16-b2ca-33c520e209e1-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.960836 4863 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.960846 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxnmm\" (UniqueName: \"kubernetes.io/projected/9e75585a-25ca-4d16-b2ca-33c520e209e1-kube-api-access-hxnmm\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.960854 4863 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:06 crc kubenswrapper[4863]: I1205 07:01:06.960862 4863 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9e75585a-25ca-4d16-b2ca-33c520e209e1-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.189604 4863 generic.go:334] "Generic (PLEG): container finished" podID="035ae227-1541-4b39-b3af-315e31a00f0c" containerID="77cd776d0e84fc8f7df1090f00f669652c40dd2d4a51956f3675c2ed0dc599f2" exitCode=0 Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.189709 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" event={"ID":"035ae227-1541-4b39-b3af-315e31a00f0c","Type":"ContainerDied","Data":"77cd776d0e84fc8f7df1090f00f669652c40dd2d4a51956f3675c2ed0dc599f2"} Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.192189 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wscq8_9e75585a-25ca-4d16-b2ca-33c520e209e1/console/0.log" Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.192231 4863 generic.go:334] "Generic (PLEG): container finished" podID="9e75585a-25ca-4d16-b2ca-33c520e209e1" containerID="739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a" exitCode=2 Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.192296 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wscq8" event={"ID":"9e75585a-25ca-4d16-b2ca-33c520e209e1","Type":"ContainerDied","Data":"739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a"} Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.192322 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wscq8" 
event={"ID":"9e75585a-25ca-4d16-b2ca-33c520e209e1","Type":"ContainerDied","Data":"0756a416d256a80bf2aa3a481283a9dec115cbc794dfa39811c0bbdbf467c3fb"} Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.192373 4863 scope.go:117] "RemoveContainer" containerID="739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a" Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.192607 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wscq8" Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.216858 4863 scope.go:117] "RemoveContainer" containerID="739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a" Dec 05 07:01:07 crc kubenswrapper[4863]: E1205 07:01:07.220010 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a\": container with ID starting with 739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a not found: ID does not exist" containerID="739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a" Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.220351 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a"} err="failed to get container status \"739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a\": rpc error: code = NotFound desc = could not find container \"739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a\": container with ID starting with 739f1034ec5c35b43c6aea4d43079827c6814c1f51b75110e469319bcdebbc8a not found: ID does not exist" Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.239901 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wscq8"] Dec 05 07:01:07 crc kubenswrapper[4863]: I1205 07:01:07.243815 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-wscq8"] Dec 05 07:01:08 crc kubenswrapper[4863]: I1205 07:01:08.615285 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e75585a-25ca-4d16-b2ca-33c520e209e1" path="/var/lib/kubelet/pods/9e75585a-25ca-4d16-b2ca-33c520e209e1/volumes" Dec 05 07:01:10 crc kubenswrapper[4863]: I1205 07:01:10.211438 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" event={"ID":"035ae227-1541-4b39-b3af-315e31a00f0c","Type":"ContainerStarted","Data":"405559dfe134a191a873b903c1dfb24eaee5a1169b5b0d4006fe4c801363881f"} Dec 05 07:01:11 crc kubenswrapper[4863]: I1205 07:01:11.218393 4863 generic.go:334] "Generic (PLEG): container finished" podID="035ae227-1541-4b39-b3af-315e31a00f0c" containerID="405559dfe134a191a873b903c1dfb24eaee5a1169b5b0d4006fe4c801363881f" exitCode=0 Dec 05 07:01:11 crc kubenswrapper[4863]: I1205 07:01:11.218455 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" event={"ID":"035ae227-1541-4b39-b3af-315e31a00f0c","Type":"ContainerDied","Data":"405559dfe134a191a873b903c1dfb24eaee5a1169b5b0d4006fe4c801363881f"} Dec 05 07:01:14 crc kubenswrapper[4863]: I1205 07:01:14.252449 4863 generic.go:334] "Generic (PLEG): container finished" podID="035ae227-1541-4b39-b3af-315e31a00f0c" 
containerID="75370fda669598a0db2d00b8ff6a573475fb080a4522f0ccd1966301884546f4" exitCode=0 Dec 05 07:01:14 crc kubenswrapper[4863]: I1205 07:01:14.252542 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" event={"ID":"035ae227-1541-4b39-b3af-315e31a00f0c","Type":"ContainerDied","Data":"75370fda669598a0db2d00b8ff6a573475fb080a4522f0ccd1966301884546f4"} Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.592554 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.683058 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-util\") pod \"035ae227-1541-4b39-b3af-315e31a00f0c\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.683174 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-bundle\") pod \"035ae227-1541-4b39-b3af-315e31a00f0c\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.683349 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxg6t\" (UniqueName: \"kubernetes.io/projected/035ae227-1541-4b39-b3af-315e31a00f0c-kube-api-access-xxg6t\") pod \"035ae227-1541-4b39-b3af-315e31a00f0c\" (UID: \"035ae227-1541-4b39-b3af-315e31a00f0c\") " Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.684332 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-bundle" (OuterVolumeSpecName: "bundle") pod "035ae227-1541-4b39-b3af-315e31a00f0c" (UID: "035ae227-1541-4b39-b3af-315e31a00f0c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.698570 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/035ae227-1541-4b39-b3af-315e31a00f0c-kube-api-access-xxg6t" (OuterVolumeSpecName: "kube-api-access-xxg6t") pod "035ae227-1541-4b39-b3af-315e31a00f0c" (UID: "035ae227-1541-4b39-b3af-315e31a00f0c"). InnerVolumeSpecName "kube-api-access-xxg6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.702766 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-util" (OuterVolumeSpecName: "util") pod "035ae227-1541-4b39-b3af-315e31a00f0c" (UID: "035ae227-1541-4b39-b3af-315e31a00f0c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.784873 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxg6t\" (UniqueName: \"kubernetes.io/projected/035ae227-1541-4b39-b3af-315e31a00f0c-kube-api-access-xxg6t\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.784918 4863 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-util\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:15 crc kubenswrapper[4863]: I1205 07:01:15.784937 4863 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/035ae227-1541-4b39-b3af-315e31a00f0c-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:01:16 crc kubenswrapper[4863]: I1205 07:01:16.273083 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" event={"ID":"035ae227-1541-4b39-b3af-315e31a00f0c","Type":"ContainerDied","Data":"1bd507c4cad782f8bf99dd87fe47bada960a7c64299d0199171a2421b03c05e0"} Dec 05 07:01:16 crc kubenswrapper[4863]: I1205 07:01:16.273163 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r" Dec 05 07:01:16 crc kubenswrapper[4863]: I1205 07:01:16.273139 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bd507c4cad782f8bf99dd87fe47bada960a7c64299d0199171a2421b03c05e0" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.620363 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr"] Dec 05 07:01:28 crc kubenswrapper[4863]: E1205 07:01:28.621977 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="035ae227-1541-4b39-b3af-315e31a00f0c" containerName="util" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.622048 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="035ae227-1541-4b39-b3af-315e31a00f0c" containerName="util" Dec 05 07:01:28 crc kubenswrapper[4863]: E1205 07:01:28.623018 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="035ae227-1541-4b39-b3af-315e31a00f0c" containerName="extract" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.623093 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="035ae227-1541-4b39-b3af-315e31a00f0c" containerName="extract" Dec 05 07:01:28 crc kubenswrapper[4863]: E1205 07:01:28.623151 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="035ae227-1541-4b39-b3af-315e31a00f0c" containerName="pull" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.623214 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="035ae227-1541-4b39-b3af-315e31a00f0c" containerName="pull" Dec 05 07:01:28 crc kubenswrapper[4863]: E1205 07:01:28.623288 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e75585a-25ca-4d16-b2ca-33c520e209e1" containerName="console" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.623338 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e75585a-25ca-4d16-b2ca-33c520e209e1" containerName="console" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.623510 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="035ae227-1541-4b39-b3af-315e31a00f0c" containerName="extract" Dec 05 
07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.623571 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e75585a-25ca-4d16-b2ca-33c520e209e1" containerName="console" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.624015 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.628038 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.628266 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.628609 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.629510 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.629626 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-6d9q6" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.634025 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr"] Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.743851 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/41176c68-379a-4430-aeb0-3e70be256b92-webhook-cert\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.744456 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sdzh\" (UniqueName: \"kubernetes.io/projected/41176c68-379a-4430-aeb0-3e70be256b92-kube-api-access-9sdzh\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.744706 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/41176c68-379a-4430-aeb0-3e70be256b92-apiservice-cert\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.846498 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/41176c68-379a-4430-aeb0-3e70be256b92-webhook-cert\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.846565 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sdzh\" (UniqueName: 
\"kubernetes.io/projected/41176c68-379a-4430-aeb0-3e70be256b92-kube-api-access-9sdzh\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.846646 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/41176c68-379a-4430-aeb0-3e70be256b92-apiservice-cert\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.853674 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/41176c68-379a-4430-aeb0-3e70be256b92-webhook-cert\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.853827 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/41176c68-379a-4430-aeb0-3e70be256b92-apiservice-cert\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.871833 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sdzh\" (UniqueName: \"kubernetes.io/projected/41176c68-379a-4430-aeb0-3e70be256b92-kube-api-access-9sdzh\") pod \"metallb-operator-controller-manager-f688c6497-dlmsr\" (UID: \"41176c68-379a-4430-aeb0-3e70be256b92\") " pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.877098 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf"] Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.877996 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.881779 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.881854 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-jjjwg" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.889855 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf"] Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.890340 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.943646 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.948125 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgl52\" (UniqueName: \"kubernetes.io/projected/5157c01b-e8a3-4f95-8f36-a41e0faa358b-kube-api-access-dgl52\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.948410 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5157c01b-e8a3-4f95-8f36-a41e0faa358b-webhook-cert\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:28 crc kubenswrapper[4863]: I1205 07:01:28.948730 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5157c01b-e8a3-4f95-8f36-a41e0faa358b-apiservice-cert\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.051449 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5157c01b-e8a3-4f95-8f36-a41e0faa358b-apiservice-cert\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.051646 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgl52\" (UniqueName: \"kubernetes.io/projected/5157c01b-e8a3-4f95-8f36-a41e0faa358b-kube-api-access-dgl52\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.051682 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5157c01b-e8a3-4f95-8f36-a41e0faa358b-webhook-cert\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.056652 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5157c01b-e8a3-4f95-8f36-a41e0faa358b-webhook-cert\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.072033 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5157c01b-e8a3-4f95-8f36-a41e0faa358b-apiservice-cert\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " 
pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.074318 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgl52\" (UniqueName: \"kubernetes.io/projected/5157c01b-e8a3-4f95-8f36-a41e0faa358b-kube-api-access-dgl52\") pod \"metallb-operator-webhook-server-855bbdbb8d-n4cpf\" (UID: \"5157c01b-e8a3-4f95-8f36-a41e0faa358b\") " pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.160356 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr"] Dec 05 07:01:29 crc kubenswrapper[4863]: W1205 07:01:29.180942 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41176c68_379a_4430_aeb0_3e70be256b92.slice/crio-0d70b06a4deca41fcc11aafc347fd48e35ae7df302894675b0bb3e06ce1022a6 WatchSource:0}: Error finding container 0d70b06a4deca41fcc11aafc347fd48e35ae7df302894675b0bb3e06ce1022a6: Status 404 returned error can't find the container with id 0d70b06a4deca41fcc11aafc347fd48e35ae7df302894675b0bb3e06ce1022a6 Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.221216 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.348417 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" event={"ID":"41176c68-379a-4430-aeb0-3e70be256b92","Type":"ContainerStarted","Data":"0d70b06a4deca41fcc11aafc347fd48e35ae7df302894675b0bb3e06ce1022a6"} Dec 05 07:01:29 crc kubenswrapper[4863]: I1205 07:01:29.443215 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf"] Dec 05 07:01:29 crc kubenswrapper[4863]: W1205 07:01:29.448352 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5157c01b_e8a3_4f95_8f36_a41e0faa358b.slice/crio-824fcb610e75e4f20474cf892607193741e37732e0a89667ef42263ae668fb55 WatchSource:0}: Error finding container 824fcb610e75e4f20474cf892607193741e37732e0a89667ef42263ae668fb55: Status 404 returned error can't find the container with id 824fcb610e75e4f20474cf892607193741e37732e0a89667ef42263ae668fb55 Dec 05 07:01:30 crc kubenswrapper[4863]: I1205 07:01:30.355880 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" event={"ID":"5157c01b-e8a3-4f95-8f36-a41e0faa358b","Type":"ContainerStarted","Data":"824fcb610e75e4f20474cf892607193741e37732e0a89667ef42263ae668fb55"} Dec 05 07:01:33 crc kubenswrapper[4863]: I1205 07:01:33.382417 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" event={"ID":"41176c68-379a-4430-aeb0-3e70be256b92","Type":"ContainerStarted","Data":"19b4564e1a89c4c3bea0e92005ae01277ce278d88ef8f51bf2d055336a7094b0"} Dec 05 07:01:33 crc kubenswrapper[4863]: I1205 07:01:33.382688 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:01:33 crc kubenswrapper[4863]: I1205 07:01:33.413344 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" podStartSLOduration=2.5030927050000003 podStartE2EDuration="5.413328023s" podCreationTimestamp="2025-12-05 07:01:28 +0000 UTC" firstStartedPulling="2025-12-05 07:01:29.182733113 +0000 UTC m=+916.908730153" lastFinishedPulling="2025-12-05 07:01:32.092968431 +0000 UTC m=+919.818965471" observedRunningTime="2025-12-05 07:01:33.411654523 +0000 UTC m=+921.137651603" watchObservedRunningTime="2025-12-05 07:01:33.413328023 +0000 UTC m=+921.139325063" Dec 05 07:01:36 crc kubenswrapper[4863]: I1205 07:01:36.442638 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" event={"ID":"5157c01b-e8a3-4f95-8f36-a41e0faa358b","Type":"ContainerStarted","Data":"0b12557102256de7831b96da4c9544875b02e2bfc6a854114dfc620f4017ea82"} Dec 05 07:01:37 crc kubenswrapper[4863]: I1205 07:01:37.454919 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:01:37 crc kubenswrapper[4863]: I1205 07:01:37.480267 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" podStartSLOduration=3.523466047 podStartE2EDuration="9.480249809s" podCreationTimestamp="2025-12-05 07:01:28 +0000 UTC" firstStartedPulling="2025-12-05 07:01:29.451944191 +0000 UTC m=+917.177941231" lastFinishedPulling="2025-12-05 07:01:35.408727953 +0000 UTC m=+923.134724993" observedRunningTime="2025-12-05 07:01:37.479820539 +0000 UTC m=+925.205817619" watchObservedRunningTime="2025-12-05 07:01:37.480249809 +0000 UTC m=+925.206246849" Dec 05 07:01:38 crc kubenswrapper[4863]: I1205 07:01:38.464465 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:01:38 crc kubenswrapper[4863]: I1205 07:01:38.464657 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:01:49 crc kubenswrapper[4863]: I1205 07:01:49.228684 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" Dec 05 07:02:08 crc kubenswrapper[4863]: I1205 07:02:08.464438 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:02:08 crc kubenswrapper[4863]: I1205 07:02:08.465163 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:02:08 crc kubenswrapper[4863]: I1205 07:02:08.947986 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="metallb-system/metallb-operator-controller-manager-f688c6497-dlmsr" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.733878 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb"] Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.734531 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.738362 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.738647 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-xbtgd" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.754402 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb"] Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.786691 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-vvf5m"] Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.789486 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.795523 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.795909 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.844776 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-v9rgb"] Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.845933 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-v9rgb" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.851460 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.851663 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.853523 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-kg225" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.853700 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.872979 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-ft6gg"] Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.874042 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.883094 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-ft6gg"] Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.883946 4863 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909146 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-reloader\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909205 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkhsb\" (UniqueName: \"kubernetes.io/projected/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-kube-api-access-xkhsb\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909257 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/821b5a5d-142c-4637-b602-a95bc2738916-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-7c6gb\" (UID: \"821b5a5d-142c-4637-b602-a95bc2738916\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909294 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-conf\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909317 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909343 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87qkj\" (UniqueName: \"kubernetes.io/projected/821b5a5d-142c-4637-b602-a95bc2738916-kube-api-access-87qkj\") pod \"frr-k8s-webhook-server-7fcb986d4-7c6gb\" (UID: \"821b5a5d-142c-4637-b602-a95bc2738916\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909367 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-sockets\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:09 crc kubenswrapper[4863]: I1205 07:02:09.909406 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics-certs\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:09 crc 
kubenswrapper[4863]: I1205 07:02:09.909435 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-startup\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010056 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-reloader\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010099 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-metrics-certs\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010123 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkhsb\" (UniqueName: \"kubernetes.io/projected/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-kube-api-access-xkhsb\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010140 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-cert\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010160 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010183 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-metrics-certs\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010205 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/821b5a5d-142c-4637-b602-a95bc2738916-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-7c6gb\" (UID: \"821b5a5d-142c-4637-b602-a95bc2738916\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010231 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-conf\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010245 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010261 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87qkj\" (UniqueName: \"kubernetes.io/projected/821b5a5d-142c-4637-b602-a95bc2738916-kube-api-access-87qkj\") pod \"frr-k8s-webhook-server-7fcb986d4-7c6gb\" (UID: \"821b5a5d-142c-4637-b602-a95bc2738916\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010276 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-sockets\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010297 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcsz4\" (UniqueName: \"kubernetes.io/projected/30337d56-e873-41ea-96ff-081bf51b8cc0-kube-api-access-fcsz4\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010322 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x4hl\" (UniqueName: \"kubernetes.io/projected/f621812d-5a52-432c-afbd-23cd824480af-kube-api-access-9x4hl\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010342 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics-certs\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010361 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f621812d-5a52-432c-afbd-23cd824480af-metallb-excludel2\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.010378 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-startup\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.011007 4863 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.011054 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics-certs podName:c0a72809-7f21-4896-a9c2-03db9fa6ecd0 nodeName:}" failed. No retries permitted until 2025-12-05 07:02:10.511039414 +0000 UTC m=+958.237036444 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics-certs") pod "frr-k8s-vvf5m" (UID: "c0a72809-7f21-4896-a9c2-03db9fa6ecd0") : secret "frr-k8s-certs-secret" not found Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.011215 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-startup\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.011338 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.011398 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-reloader\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.011435 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-conf\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.011460 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-frr-sockets\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.016669 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/821b5a5d-142c-4637-b602-a95bc2738916-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-7c6gb\" (UID: \"821b5a5d-142c-4637-b602-a95bc2738916\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.026931 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkhsb\" (UniqueName: \"kubernetes.io/projected/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-kube-api-access-xkhsb\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.035082 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87qkj\" (UniqueName: \"kubernetes.io/projected/821b5a5d-142c-4637-b602-a95bc2738916-kube-api-access-87qkj\") pod \"frr-k8s-webhook-server-7fcb986d4-7c6gb\" (UID: \"821b5a5d-142c-4637-b602-a95bc2738916\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.052928 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.111154 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f621812d-5a52-432c-afbd-23cd824480af-metallb-excludel2\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.111228 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-metrics-certs\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.111262 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-cert\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.111288 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.111322 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-metrics-certs\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.111381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcsz4\" (UniqueName: \"kubernetes.io/projected/30337d56-e873-41ea-96ff-081bf51b8cc0-kube-api-access-fcsz4\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.111411 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x4hl\" (UniqueName: \"kubernetes.io/projected/f621812d-5a52-432c-afbd-23cd824480af-kube-api-access-9x4hl\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.111634 4863 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.111700 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-metrics-certs podName:30337d56-e873-41ea-96ff-081bf51b8cc0 nodeName:}" failed. No retries permitted until 2025-12-05 07:02:10.6116827 +0000 UTC m=+958.337679740 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-metrics-certs") pod "controller-f8648f98b-ft6gg" (UID: "30337d56-e873-41ea-96ff-081bf51b8cc0") : secret "controller-certs-secret" not found Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.111819 4863 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.111856 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist podName:f621812d-5a52-432c-afbd-23cd824480af nodeName:}" failed. No retries permitted until 2025-12-05 07:02:10.611845574 +0000 UTC m=+958.337842614 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist") pod "speaker-v9rgb" (UID: "f621812d-5a52-432c-afbd-23cd824480af") : secret "metallb-memberlist" not found Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.112502 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f621812d-5a52-432c-afbd-23cd824480af-metallb-excludel2\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.116884 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-metrics-certs\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.124185 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-cert\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.128732 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcsz4\" (UniqueName: \"kubernetes.io/projected/30337d56-e873-41ea-96ff-081bf51b8cc0-kube-api-access-fcsz4\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.134588 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x4hl\" (UniqueName: \"kubernetes.io/projected/f621812d-5a52-432c-afbd-23cd824480af-kube-api-access-9x4hl\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.517046 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics-certs\") pod \"frr-k8s-vvf5m\" (UID: \"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.521331 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0a72809-7f21-4896-a9c2-03db9fa6ecd0-metrics-certs\") pod \"frr-k8s-vvf5m\" (UID: 
\"c0a72809-7f21-4896-a9c2-03db9fa6ecd0\") " pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.557638 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb"] Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.618224 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.618300 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-metrics-certs\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.618408 4863 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 07:02:10 crc kubenswrapper[4863]: E1205 07:02:10.618503 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist podName:f621812d-5a52-432c-afbd-23cd824480af nodeName:}" failed. No retries permitted until 2025-12-05 07:02:11.618480644 +0000 UTC m=+959.344477684 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist") pod "speaker-v9rgb" (UID: "f621812d-5a52-432c-afbd-23cd824480af") : secret "metallb-memberlist" not found Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.621583 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30337d56-e873-41ea-96ff-081bf51b8cc0-metrics-certs\") pod \"controller-f8648f98b-ft6gg\" (UID: \"30337d56-e873-41ea-96ff-081bf51b8cc0\") " pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.652770 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" event={"ID":"821b5a5d-142c-4637-b602-a95bc2738916","Type":"ContainerStarted","Data":"4de13d72db3aba4717651c3576a290bb2dc8f9759a1d6401aa4770311e671c2d"} Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.721903 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.791938 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:10 crc kubenswrapper[4863]: I1205 07:02:10.974547 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-ft6gg"] Dec 05 07:02:10 crc kubenswrapper[4863]: W1205 07:02:10.977570 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30337d56_e873_41ea_96ff_081bf51b8cc0.slice/crio-86c0eb5be7660a760189cbd69c0127a49860b3f05c256b95199893e259da6b1e WatchSource:0}: Error finding container 86c0eb5be7660a760189cbd69c0127a49860b3f05c256b95199893e259da6b1e: Status 404 returned error can't find the container with id 86c0eb5be7660a760189cbd69c0127a49860b3f05c256b95199893e259da6b1e Dec 05 07:02:11 crc kubenswrapper[4863]: I1205 07:02:11.631725 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:11 crc kubenswrapper[4863]: I1205 07:02:11.644553 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f621812d-5a52-432c-afbd-23cd824480af-memberlist\") pod \"speaker-v9rgb\" (UID: \"f621812d-5a52-432c-afbd-23cd824480af\") " pod="metallb-system/speaker-v9rgb" Dec 05 07:02:11 crc kubenswrapper[4863]: I1205 07:02:11.661120 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-v9rgb" Dec 05 07:02:11 crc kubenswrapper[4863]: I1205 07:02:11.677367 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"aa1ae85f2735c59c375d4f67fe540dde31ff36ee3201cccaa2b16e5461512d29"} Dec 05 07:02:11 crc kubenswrapper[4863]: I1205 07:02:11.679759 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-ft6gg" event={"ID":"30337d56-e873-41ea-96ff-081bf51b8cc0","Type":"ContainerStarted","Data":"34011184bd571f8f38cb9fbc612aee1a73f20c5155a2b920657650f03cc13736"} Dec 05 07:02:11 crc kubenswrapper[4863]: I1205 07:02:11.679790 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-ft6gg" event={"ID":"30337d56-e873-41ea-96ff-081bf51b8cc0","Type":"ContainerStarted","Data":"86c0eb5be7660a760189cbd69c0127a49860b3f05c256b95199893e259da6b1e"} Dec 05 07:02:11 crc kubenswrapper[4863]: I1205 07:02:11.680750 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:12 crc kubenswrapper[4863]: I1205 07:02:12.638411 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-ft6gg" podStartSLOduration=3.638383215 podStartE2EDuration="3.638383215s" podCreationTimestamp="2025-12-05 07:02:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:02:11.698615576 +0000 UTC m=+959.424612626" watchObservedRunningTime="2025-12-05 07:02:12.638383215 +0000 UTC m=+960.364380255" Dec 05 07:02:12 crc kubenswrapper[4863]: I1205 07:02:12.697749 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-ft6gg" 
event={"ID":"30337d56-e873-41ea-96ff-081bf51b8cc0","Type":"ContainerStarted","Data":"9089a800e4763bf91c41d4ba08f174d164766c9f39fc83db021baccf8001f197"} Dec 05 07:02:12 crc kubenswrapper[4863]: I1205 07:02:12.701612 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-v9rgb" event={"ID":"f621812d-5a52-432c-afbd-23cd824480af","Type":"ContainerStarted","Data":"5c7d3fc3ce9ba427371353be0d075283c955ea3e073923925fe5c573fdc9b677"} Dec 05 07:02:12 crc kubenswrapper[4863]: I1205 07:02:12.701646 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-v9rgb" event={"ID":"f621812d-5a52-432c-afbd-23cd824480af","Type":"ContainerStarted","Data":"03b3fbaec402b3b9166460abc114c0af56557aa448d5bea1f7a40e0cc9b52928"} Dec 05 07:02:12 crc kubenswrapper[4863]: I1205 07:02:12.701655 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-v9rgb" event={"ID":"f621812d-5a52-432c-afbd-23cd824480af","Type":"ContainerStarted","Data":"78f4a4b78e6f00fa89879f927c56ba43114d7dd79e1fecc6b489ad6c57eec504"} Dec 05 07:02:12 crc kubenswrapper[4863]: I1205 07:02:12.702243 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-v9rgb" Dec 05 07:02:17 crc kubenswrapper[4863]: I1205 07:02:17.735301 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" event={"ID":"821b5a5d-142c-4637-b602-a95bc2738916","Type":"ContainerStarted","Data":"66700068bdb2b851f2896b326ee764b7c8a203e84e93e427a381f0162142ec51"} Dec 05 07:02:17 crc kubenswrapper[4863]: I1205 07:02:17.736043 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:17 crc kubenswrapper[4863]: I1205 07:02:17.737917 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"734da2965cb9706142dd8b7ca26c1c0b2245058043ee085f7c1853d5bfd96d2e"} Dec 05 07:02:17 crc kubenswrapper[4863]: I1205 07:02:17.754116 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-v9rgb" podStartSLOduration=8.754090317 podStartE2EDuration="8.754090317s" podCreationTimestamp="2025-12-05 07:02:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:02:12.729661355 +0000 UTC m=+960.455658385" watchObservedRunningTime="2025-12-05 07:02:17.754090317 +0000 UTC m=+965.480087367" Dec 05 07:02:17 crc kubenswrapper[4863]: I1205 07:02:17.757116 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" podStartSLOduration=1.7546675349999998 podStartE2EDuration="8.757108359s" podCreationTimestamp="2025-12-05 07:02:09 +0000 UTC" firstStartedPulling="2025-12-05 07:02:10.563364115 +0000 UTC m=+958.289361155" lastFinishedPulling="2025-12-05 07:02:17.565804909 +0000 UTC m=+965.291801979" observedRunningTime="2025-12-05 07:02:17.755675155 +0000 UTC m=+965.481672215" watchObservedRunningTime="2025-12-05 07:02:17.757108359 +0000 UTC m=+965.483105409" Dec 05 07:02:18 crc kubenswrapper[4863]: I1205 07:02:18.747405 4863 generic.go:334] "Generic (PLEG): container finished" podID="c0a72809-7f21-4896-a9c2-03db9fa6ecd0" containerID="734da2965cb9706142dd8b7ca26c1c0b2245058043ee085f7c1853d5bfd96d2e" exitCode=0 Dec 05 07:02:18 crc 
kubenswrapper[4863]: I1205 07:02:18.747445 4863 generic.go:334] "Generic (PLEG): container finished" podID="c0a72809-7f21-4896-a9c2-03db9fa6ecd0" containerID="8473344d43381841dd092684efc3b1d7fc78a6c5fd60aa47e6565988486e5cfd" exitCode=0 Dec 05 07:02:18 crc kubenswrapper[4863]: I1205 07:02:18.747458 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerDied","Data":"734da2965cb9706142dd8b7ca26c1c0b2245058043ee085f7c1853d5bfd96d2e"} Dec 05 07:02:18 crc kubenswrapper[4863]: I1205 07:02:18.747549 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerDied","Data":"8473344d43381841dd092684efc3b1d7fc78a6c5fd60aa47e6565988486e5cfd"} Dec 05 07:02:19 crc kubenswrapper[4863]: I1205 07:02:19.758351 4863 generic.go:334] "Generic (PLEG): container finished" podID="c0a72809-7f21-4896-a9c2-03db9fa6ecd0" containerID="19556de5be954eb53479efabe0f1686d8cbccb4116ced386e1f8f74b5804b28e" exitCode=0 Dec 05 07:02:19 crc kubenswrapper[4863]: I1205 07:02:19.758401 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerDied","Data":"19556de5be954eb53479efabe0f1686d8cbccb4116ced386e1f8f74b5804b28e"} Dec 05 07:02:20 crc kubenswrapper[4863]: I1205 07:02:20.769972 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"3cd6a9e898177f32998613f0bce7bd4ab5ab30c62d9c982f7c3bf005d1e6fc6b"} Dec 05 07:02:20 crc kubenswrapper[4863]: I1205 07:02:20.770239 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"dd2fdb260e144ccc5c75f86e0bd8241234bc7864d68e42b06ab8c1ecf4bf4f10"} Dec 05 07:02:20 crc kubenswrapper[4863]: I1205 07:02:20.770250 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"c09bafc1388c3a572ab8e3b35f0b6e78673c826617fc23c6c34161c7b059b5fb"} Dec 05 07:02:20 crc kubenswrapper[4863]: I1205 07:02:20.770259 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"d6e082eae030df107eeb400977174b662fbc64c3a24cdb413b8e03bc373d388d"} Dec 05 07:02:20 crc kubenswrapper[4863]: I1205 07:02:20.770268 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"0a1c88ea41765c09fb73dd16c0d5871e876f1d70ad03fc78a652c531f71f25ba"} Dec 05 07:02:21 crc kubenswrapper[4863]: I1205 07:02:21.677930 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-v9rgb" Dec 05 07:02:21 crc kubenswrapper[4863]: I1205 07:02:21.784215 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vvf5m" event={"ID":"c0a72809-7f21-4896-a9c2-03db9fa6ecd0","Type":"ContainerStarted","Data":"78ba3dfd9b1a00e6012685ba861fd13cddc84f61bd359e9d08ee666f894b24b6"} Dec 05 07:02:21 crc kubenswrapper[4863]: I1205 07:02:21.785192 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:21 crc kubenswrapper[4863]: I1205 07:02:21.825939 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-vvf5m" podStartSLOduration=6.285120493 podStartE2EDuration="12.82591904s" podCreationTimestamp="2025-12-05 07:02:09 +0000 UTC" firstStartedPulling="2025-12-05 07:02:11.007355477 +0000 UTC m=+958.733352517" lastFinishedPulling="2025-12-05 07:02:17.548154004 +0000 UTC m=+965.274151064" observedRunningTime="2025-12-05 07:02:21.825073561 +0000 UTC m=+969.551070611" watchObservedRunningTime="2025-12-05 07:02:21.82591904 +0000 UTC m=+969.551916100" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.159027 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr"] Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.161139 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.169305 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.208864 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr"] Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.298275 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzww9\" (UniqueName: \"kubernetes.io/projected/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-kube-api-access-lzww9\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.298328 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.298661 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.399744 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.399855 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzww9\" 
(UniqueName: \"kubernetes.io/projected/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-kube-api-access-lzww9\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.399890 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.400422 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.400454 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.421643 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzww9\" (UniqueName: \"kubernetes.io/projected/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-kube-api-access-lzww9\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.484654 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.726107 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr"] Dec 05 07:02:23 crc kubenswrapper[4863]: W1205 07:02:23.735543 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ff8deee_f74b_43e5_8e15_d2f5685fcb74.slice/crio-09890806db5baef790372ac202ea47553eabe5f44c2593ed0e41090d9d6fb6e1 WatchSource:0}: Error finding container 09890806db5baef790372ac202ea47553eabe5f44c2593ed0e41090d9d6fb6e1: Status 404 returned error can't find the container with id 09890806db5baef790372ac202ea47553eabe5f44c2593ed0e41090d9d6fb6e1 Dec 05 07:02:23 crc kubenswrapper[4863]: I1205 07:02:23.803082 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" event={"ID":"8ff8deee-f74b-43e5-8e15-d2f5685fcb74","Type":"ContainerStarted","Data":"09890806db5baef790372ac202ea47553eabe5f44c2593ed0e41090d9d6fb6e1"} Dec 05 07:02:24 crc kubenswrapper[4863]: I1205 07:02:24.811193 4863 generic.go:334] "Generic (PLEG): container finished" podID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerID="17d8478d7d635f2bb2139f1eab6b7ecbf7152af79efec23436a1b4c57b790522" exitCode=0 Dec 05 07:02:24 crc kubenswrapper[4863]: I1205 07:02:24.811316 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" event={"ID":"8ff8deee-f74b-43e5-8e15-d2f5685fcb74","Type":"ContainerDied","Data":"17d8478d7d635f2bb2139f1eab6b7ecbf7152af79efec23436a1b4c57b790522"} Dec 05 07:02:25 crc kubenswrapper[4863]: I1205 07:02:25.726872 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:25 crc kubenswrapper[4863]: I1205 07:02:25.791078 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:30 crc kubenswrapper[4863]: I1205 07:02:30.056879 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-7c6gb" Dec 05 07:02:30 crc kubenswrapper[4863]: I1205 07:02:30.725759 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-vvf5m" Dec 05 07:02:30 crc kubenswrapper[4863]: I1205 07:02:30.795887 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-ft6gg" Dec 05 07:02:30 crc kubenswrapper[4863]: I1205 07:02:30.848020 4863 generic.go:334] "Generic (PLEG): container finished" podID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerID="0d7f503aa9101f3dec4472624e8e21737fa7264bee956cd76352eff9d9083f25" exitCode=0 Dec 05 07:02:30 crc kubenswrapper[4863]: I1205 07:02:30.848100 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" event={"ID":"8ff8deee-f74b-43e5-8e15-d2f5685fcb74","Type":"ContainerDied","Data":"0d7f503aa9101f3dec4472624e8e21737fa7264bee956cd76352eff9d9083f25"} Dec 05 07:02:32 crc kubenswrapper[4863]: I1205 07:02:32.868231 4863 generic.go:334] "Generic (PLEG): container finished" podID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" 
containerID="a52af272f00fa7524597bd26189824bf761165b5b8748d8e137169bff24aedd6" exitCode=0 Dec 05 07:02:32 crc kubenswrapper[4863]: I1205 07:02:32.868274 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" event={"ID":"8ff8deee-f74b-43e5-8e15-d2f5685fcb74","Type":"ContainerDied","Data":"a52af272f00fa7524597bd26189824bf761165b5b8748d8e137169bff24aedd6"} Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.212555 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.352294 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-util\") pod \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.352436 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzww9\" (UniqueName: \"kubernetes.io/projected/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-kube-api-access-lzww9\") pod \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.352531 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-bundle\") pod \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\" (UID: \"8ff8deee-f74b-43e5-8e15-d2f5685fcb74\") " Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.353844 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-bundle" (OuterVolumeSpecName: "bundle") pod "8ff8deee-f74b-43e5-8e15-d2f5685fcb74" (UID: "8ff8deee-f74b-43e5-8e15-d2f5685fcb74"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.363310 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-kube-api-access-lzww9" (OuterVolumeSpecName: "kube-api-access-lzww9") pod "8ff8deee-f74b-43e5-8e15-d2f5685fcb74" (UID: "8ff8deee-f74b-43e5-8e15-d2f5685fcb74"). InnerVolumeSpecName "kube-api-access-lzww9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.369488 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-util" (OuterVolumeSpecName: "util") pod "8ff8deee-f74b-43e5-8e15-d2f5685fcb74" (UID: "8ff8deee-f74b-43e5-8e15-d2f5685fcb74"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.454404 4863 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-util\") on node \"crc\" DevicePath \"\"" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.454682 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzww9\" (UniqueName: \"kubernetes.io/projected/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-kube-api-access-lzww9\") on node \"crc\" DevicePath \"\"" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.454693 4863 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ff8deee-f74b-43e5-8e15-d2f5685fcb74-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.882692 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" event={"ID":"8ff8deee-f74b-43e5-8e15-d2f5685fcb74","Type":"ContainerDied","Data":"09890806db5baef790372ac202ea47553eabe5f44c2593ed0e41090d9d6fb6e1"} Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.882729 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09890806db5baef790372ac202ea47553eabe5f44c2593ed0e41090d9d6fb6e1" Dec 05 07:02:34 crc kubenswrapper[4863]: I1205 07:02:34.882779 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr" Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.463828 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.464092 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.464135 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.464708 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d093d3ecafe6d3105031034151698dce338e7ba21ad94081ceea3ba6f6c0fcd4"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.464758 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://d093d3ecafe6d3105031034151698dce338e7ba21ad94081ceea3ba6f6c0fcd4" gracePeriod=600 Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.911542 4863 generic.go:334] "Generic (PLEG): container finished" 
podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="d093d3ecafe6d3105031034151698dce338e7ba21ad94081ceea3ba6f6c0fcd4" exitCode=0 Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.911641 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"d093d3ecafe6d3105031034151698dce338e7ba21ad94081ceea3ba6f6c0fcd4"} Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.911937 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"ab7f58a6592494973161b698d02f116307d958edfd4568a004df574f2c07d6ff"} Dec 05 07:02:38 crc kubenswrapper[4863]: I1205 07:02:38.911965 4863 scope.go:117] "RemoveContainer" containerID="b8c26117c21159b1d8cea8a3a8c1b0a943220d2e10ef93da3165363f83cb28c0" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.726535 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb"] Dec 05 07:02:40 crc kubenswrapper[4863]: E1205 07:02:40.727421 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerName="util" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.727442 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerName="util" Dec 05 07:02:40 crc kubenswrapper[4863]: E1205 07:02:40.727487 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerName="pull" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.727498 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerName="pull" Dec 05 07:02:40 crc kubenswrapper[4863]: E1205 07:02:40.727510 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerName="extract" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.727519 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerName="extract" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.727649 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff8deee-f74b-43e5-8e15-d2f5685fcb74" containerName="extract" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.728199 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.730212 4863 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-mw52j" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.730625 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.733895 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.781029 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb"] Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.835758 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a96169f5-d8fa-4dd9-9a35-187713f92ae2-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-4h5cb\" (UID: \"a96169f5-d8fa-4dd9-9a35-187713f92ae2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.835946 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv6z2\" (UniqueName: \"kubernetes.io/projected/a96169f5-d8fa-4dd9-9a35-187713f92ae2-kube-api-access-dv6z2\") pod \"cert-manager-operator-controller-manager-64cf6dff88-4h5cb\" (UID: \"a96169f5-d8fa-4dd9-9a35-187713f92ae2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.937164 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv6z2\" (UniqueName: \"kubernetes.io/projected/a96169f5-d8fa-4dd9-9a35-187713f92ae2-kube-api-access-dv6z2\") pod \"cert-manager-operator-controller-manager-64cf6dff88-4h5cb\" (UID: \"a96169f5-d8fa-4dd9-9a35-187713f92ae2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.937229 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a96169f5-d8fa-4dd9-9a35-187713f92ae2-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-4h5cb\" (UID: \"a96169f5-d8fa-4dd9-9a35-187713f92ae2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.937877 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a96169f5-d8fa-4dd9-9a35-187713f92ae2-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-4h5cb\" (UID: \"a96169f5-d8fa-4dd9-9a35-187713f92ae2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:40 crc kubenswrapper[4863]: I1205 07:02:40.956940 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv6z2\" (UniqueName: \"kubernetes.io/projected/a96169f5-d8fa-4dd9-9a35-187713f92ae2-kube-api-access-dv6z2\") pod \"cert-manager-operator-controller-manager-64cf6dff88-4h5cb\" (UID: \"a96169f5-d8fa-4dd9-9a35-187713f92ae2\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:41 crc kubenswrapper[4863]: I1205 07:02:41.044399 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" Dec 05 07:02:41 crc kubenswrapper[4863]: I1205 07:02:41.470744 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb"] Dec 05 07:02:41 crc kubenswrapper[4863]: W1205 07:02:41.484376 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda96169f5_d8fa_4dd9_9a35_187713f92ae2.slice/crio-43077de388c327380de9b98a10de0b43561e58ff50dc6d957de1965b667e8a7d WatchSource:0}: Error finding container 43077de388c327380de9b98a10de0b43561e58ff50dc6d957de1965b667e8a7d: Status 404 returned error can't find the container with id 43077de388c327380de9b98a10de0b43561e58ff50dc6d957de1965b667e8a7d Dec 05 07:02:41 crc kubenswrapper[4863]: I1205 07:02:41.934363 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" event={"ID":"a96169f5-d8fa-4dd9-9a35-187713f92ae2","Type":"ContainerStarted","Data":"43077de388c327380de9b98a10de0b43561e58ff50dc6d957de1965b667e8a7d"} Dec 05 07:02:47 crc kubenswrapper[4863]: I1205 07:02:47.978881 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" event={"ID":"a96169f5-d8fa-4dd9-9a35-187713f92ae2","Type":"ContainerStarted","Data":"eded6708c70aca2576cd660bd3929b1fdeed0012d798ceab384491d09b4e633f"} Dec 05 07:02:48 crc kubenswrapper[4863]: I1205 07:02:48.003642 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-4h5cb" podStartSLOduration=1.835338303 podStartE2EDuration="8.003625568s" podCreationTimestamp="2025-12-05 07:02:40 +0000 UTC" firstStartedPulling="2025-12-05 07:02:41.486098419 +0000 UTC m=+989.212095459" lastFinishedPulling="2025-12-05 07:02:47.654385684 +0000 UTC m=+995.380382724" observedRunningTime="2025-12-05 07:02:48.002517701 +0000 UTC m=+995.728514761" watchObservedRunningTime="2025-12-05 07:02:48.003625568 +0000 UTC m=+995.729622608" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.289960 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn"] Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.292431 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.295666 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.295812 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.297108 4863 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-2km28" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.319304 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn"] Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.415573 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ddcfff38-07b0-4814-9e0f-a7265dd82da3-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-cdqhn\" (UID: \"ddcfff38-07b0-4814-9e0f-a7265dd82da3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.415670 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7qfx\" (UniqueName: \"kubernetes.io/projected/ddcfff38-07b0-4814-9e0f-a7265dd82da3-kube-api-access-t7qfx\") pod \"cert-manager-cainjector-855d9ccff4-cdqhn\" (UID: \"ddcfff38-07b0-4814-9e0f-a7265dd82da3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.516990 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7qfx\" (UniqueName: \"kubernetes.io/projected/ddcfff38-07b0-4814-9e0f-a7265dd82da3-kube-api-access-t7qfx\") pod \"cert-manager-cainjector-855d9ccff4-cdqhn\" (UID: \"ddcfff38-07b0-4814-9e0f-a7265dd82da3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.517126 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ddcfff38-07b0-4814-9e0f-a7265dd82da3-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-cdqhn\" (UID: \"ddcfff38-07b0-4814-9e0f-a7265dd82da3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.538314 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7qfx\" (UniqueName: \"kubernetes.io/projected/ddcfff38-07b0-4814-9e0f-a7265dd82da3-kube-api-access-t7qfx\") pod \"cert-manager-cainjector-855d9ccff4-cdqhn\" (UID: \"ddcfff38-07b0-4814-9e0f-a7265dd82da3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.539069 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ddcfff38-07b0-4814-9e0f-a7265dd82da3-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-cdqhn\" (UID: \"ddcfff38-07b0-4814-9e0f-a7265dd82da3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:53 crc kubenswrapper[4863]: I1205 07:02:53.616052 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.042735 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn"] Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.492489 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-2rknr"] Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.493280 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.495401 4863 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-46f68" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.505535 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-2rknr"] Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.632131 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5a50e280-47ac-43e1-9d3a-722cd11cafcc-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-2rknr\" (UID: \"5a50e280-47ac-43e1-9d3a-722cd11cafcc\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.632235 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5km8\" (UniqueName: \"kubernetes.io/projected/5a50e280-47ac-43e1-9d3a-722cd11cafcc-kube-api-access-h5km8\") pod \"cert-manager-webhook-f4fb5df64-2rknr\" (UID: \"5a50e280-47ac-43e1-9d3a-722cd11cafcc\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.733247 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5a50e280-47ac-43e1-9d3a-722cd11cafcc-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-2rknr\" (UID: \"5a50e280-47ac-43e1-9d3a-722cd11cafcc\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.733583 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5km8\" (UniqueName: \"kubernetes.io/projected/5a50e280-47ac-43e1-9d3a-722cd11cafcc-kube-api-access-h5km8\") pod \"cert-manager-webhook-f4fb5df64-2rknr\" (UID: \"5a50e280-47ac-43e1-9d3a-722cd11cafcc\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.753587 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5km8\" (UniqueName: \"kubernetes.io/projected/5a50e280-47ac-43e1-9d3a-722cd11cafcc-kube-api-access-h5km8\") pod \"cert-manager-webhook-f4fb5df64-2rknr\" (UID: \"5a50e280-47ac-43e1-9d3a-722cd11cafcc\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 07:02:54.761961 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5a50e280-47ac-43e1-9d3a-722cd11cafcc-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-2rknr\" (UID: \"5a50e280-47ac-43e1-9d3a-722cd11cafcc\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:54 crc kubenswrapper[4863]: I1205 
07:02:54.811496 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:02:55 crc kubenswrapper[4863]: I1205 07:02:55.025117 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-2rknr"] Dec 05 07:02:55 crc kubenswrapper[4863]: W1205 07:02:55.029005 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a50e280_47ac_43e1_9d3a_722cd11cafcc.slice/crio-36538fea12a56357be644e21932fb9c16a8c37bf7ad9fbda1066bb5235d93dd5 WatchSource:0}: Error finding container 36538fea12a56357be644e21932fb9c16a8c37bf7ad9fbda1066bb5235d93dd5: Status 404 returned error can't find the container with id 36538fea12a56357be644e21932fb9c16a8c37bf7ad9fbda1066bb5235d93dd5 Dec 05 07:02:55 crc kubenswrapper[4863]: I1205 07:02:55.049696 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" event={"ID":"5a50e280-47ac-43e1-9d3a-722cd11cafcc","Type":"ContainerStarted","Data":"36538fea12a56357be644e21932fb9c16a8c37bf7ad9fbda1066bb5235d93dd5"} Dec 05 07:02:55 crc kubenswrapper[4863]: I1205 07:02:55.053288 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" event={"ID":"ddcfff38-07b0-4814-9e0f-a7265dd82da3","Type":"ContainerStarted","Data":"64cb41ac41d287bab459736e99d5d5194820b73111e16839319027015ade8431"} Dec 05 07:03:08 crc kubenswrapper[4863]: E1205 07:03:08.508533 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df" Dec 05 07:03:08 crc kubenswrapper[4863]: E1205 07:03:08.509143 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cert-manager-cainjector,Image:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,Command:[/app/cmd/cainjector/cainjector],Args:[--leader-election-namespace=kube-system 
--v=2],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:9402,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:POD_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bound-sa-token,ReadOnly:true,MountPath:/var/run/secrets/openshift/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t7qfx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cert-manager-cainjector-855d9ccff4-cdqhn_cert-manager(ddcfff38-07b0-4814-9e0f-a7265dd82da3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 07:03:08 crc kubenswrapper[4863]: E1205 07:03:08.510363 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-cainjector\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" podUID="ddcfff38-07b0-4814-9e0f-a7265dd82da3" Dec 05 07:03:08 crc kubenswrapper[4863]: E1205 07:03:08.548118 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df" Dec 05 07:03:08 crc kubenswrapper[4863]: E1205 07:03:08.548370 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cert-manager-webhook,Image:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,Command:[/app/cmd/webhook/webhook],Args:[--dynamic-serving-ca-secret-name=cert-manager-webhook-ca --dynamic-serving-ca-secret-namespace=$(POD_NAMESPACE) --dynamic-serving-dns-names=cert-manager-webhook,cert-manager-webhook.$(POD_NAMESPACE),cert-manager-webhook.$(POD_NAMESPACE).svc --secure-port=10250 
--v=2],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:10250,Protocol:TCP,HostIP:,},ContainerPort{Name:healthcheck,HostPort:0,ContainerPort:6080,Protocol:TCP,HostIP:,},ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:9402,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:POD_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bound-sa-token,ReadOnly:true,MountPath:/var/run/secrets/openshift/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h5km8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{1 0 healthcheck},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:60,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{1 0 healthcheck},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cert-manager-webhook-f4fb5df64-2rknr_cert-manager(5a50e280-47ac-43e1-9d3a-722cd11cafcc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 07:03:08 crc kubenswrapper[4863]: E1205 07:03:08.550336 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" podUID="5a50e280-47ac-43e1-9d3a-722cd11cafcc" Dec 05 07:03:09 crc kubenswrapper[4863]: E1205 07:03:09.169082 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df\\\"\"" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" podUID="5a50e280-47ac-43e1-9d3a-722cd11cafcc" Dec 05 07:03:09 crc kubenswrapper[4863]: E1205 07:03:09.169155 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-cainjector\" with ImagePullBackOff: 
\"Back-off pulling image \\\"registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df\\\"\"" pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" podUID="ddcfff38-07b0-4814-9e0f-a7265dd82da3" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.270236 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-srsmr"] Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.271416 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.275168 4863 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-5mwzw" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.285099 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-srsmr"] Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.354847 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ca1063-26d1-494c-90ba-3950f5351316-bound-sa-token\") pod \"cert-manager-86cb77c54b-srsmr\" (UID: \"b6ca1063-26d1-494c-90ba-3950f5351316\") " pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.355114 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgkxf\" (UniqueName: \"kubernetes.io/projected/b6ca1063-26d1-494c-90ba-3950f5351316-kube-api-access-wgkxf\") pod \"cert-manager-86cb77c54b-srsmr\" (UID: \"b6ca1063-26d1-494c-90ba-3950f5351316\") " pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.456397 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgkxf\" (UniqueName: \"kubernetes.io/projected/b6ca1063-26d1-494c-90ba-3950f5351316-kube-api-access-wgkxf\") pod \"cert-manager-86cb77c54b-srsmr\" (UID: \"b6ca1063-26d1-494c-90ba-3950f5351316\") " pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.456462 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ca1063-26d1-494c-90ba-3950f5351316-bound-sa-token\") pod \"cert-manager-86cb77c54b-srsmr\" (UID: \"b6ca1063-26d1-494c-90ba-3950f5351316\") " pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.476192 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ca1063-26d1-494c-90ba-3950f5351316-bound-sa-token\") pod \"cert-manager-86cb77c54b-srsmr\" (UID: \"b6ca1063-26d1-494c-90ba-3950f5351316\") " pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.476283 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgkxf\" (UniqueName: \"kubernetes.io/projected/b6ca1063-26d1-494c-90ba-3950f5351316-kube-api-access-wgkxf\") pod \"cert-manager-86cb77c54b-srsmr\" (UID: \"b6ca1063-26d1-494c-90ba-3950f5351316\") " pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.643920 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-srsmr" Dec 05 07:03:10 crc kubenswrapper[4863]: I1205 07:03:10.845323 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-srsmr"] Dec 05 07:03:11 crc kubenswrapper[4863]: I1205 07:03:11.181290 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-srsmr" event={"ID":"b6ca1063-26d1-494c-90ba-3950f5351316","Type":"ContainerStarted","Data":"fa14867784e4d6ec963a981151e433cd9ee1d2c5eb8a29ff42615fae213b2b2a"} Dec 05 07:03:15 crc kubenswrapper[4863]: I1205 07:03:15.235696 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-srsmr" event={"ID":"b6ca1063-26d1-494c-90ba-3950f5351316","Type":"ContainerStarted","Data":"498a987ff38699575b604c6632ca031842b22f8c2e9dce9c9898e88feb5045cb"} Dec 05 07:03:15 crc kubenswrapper[4863]: I1205 07:03:15.254296 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-srsmr" podStartSLOduration=1.88592559 podStartE2EDuration="5.254273781s" podCreationTimestamp="2025-12-05 07:03:10 +0000 UTC" firstStartedPulling="2025-12-05 07:03:10.859904503 +0000 UTC m=+1018.585901543" lastFinishedPulling="2025-12-05 07:03:14.228252674 +0000 UTC m=+1021.954249734" observedRunningTime="2025-12-05 07:03:15.254103106 +0000 UTC m=+1022.980100206" watchObservedRunningTime="2025-12-05 07:03:15.254273781 +0000 UTC m=+1022.980270841" Dec 05 07:03:21 crc kubenswrapper[4863]: I1205 07:03:21.285917 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" event={"ID":"5a50e280-47ac-43e1-9d3a-722cd11cafcc","Type":"ContainerStarted","Data":"2599b432fa7080cbd329e0db93b9bebd5d79ed4bbf441f9bc337c4f2a9effdad"} Dec 05 07:03:21 crc kubenswrapper[4863]: I1205 07:03:21.286730 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:03:21 crc kubenswrapper[4863]: I1205 07:03:21.315119 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" podStartSLOduration=-9223372009.539677 podStartE2EDuration="27.31509889s" podCreationTimestamp="2025-12-05 07:02:54 +0000 UTC" firstStartedPulling="2025-12-05 07:02:55.031560422 +0000 UTC m=+1002.757557472" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:03:21.309613227 +0000 UTC m=+1029.035610267" watchObservedRunningTime="2025-12-05 07:03:21.31509889 +0000 UTC m=+1029.041095930" Dec 05 07:03:23 crc kubenswrapper[4863]: I1205 07:03:23.299637 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" event={"ID":"ddcfff38-07b0-4814-9e0f-a7265dd82da3","Type":"ContainerStarted","Data":"b5992d2d30aa0414781d88c43430c228c3e4a8a6e9a77990c0087fda8f9ef7f0"} Dec 05 07:03:23 crc kubenswrapper[4863]: I1205 07:03:23.323663 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-cdqhn" podStartSLOduration=-9223372006.53113 podStartE2EDuration="30.323646923s" podCreationTimestamp="2025-12-05 07:02:53 +0000 UTC" firstStartedPulling="2025-12-05 07:02:54.049332633 +0000 UTC m=+1001.775329673" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:03:23.320965768 +0000 UTC m=+1031.046962808" watchObservedRunningTime="2025-12-05 
07:03:23.323646923 +0000 UTC m=+1031.049643963" Dec 05 07:03:29 crc kubenswrapper[4863]: I1205 07:03:29.815822 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-2rknr" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.402175 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-sk9th"] Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.403385 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sk9th" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.406939 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.407143 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.407301 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-8c5ws" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.440970 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sk9th"] Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.505610 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfz2f\" (UniqueName: \"kubernetes.io/projected/2d45c10f-a82a-40de-ae81-b6ed644124bb-kube-api-access-bfz2f\") pod \"openstack-operator-index-sk9th\" (UID: \"2d45c10f-a82a-40de-ae81-b6ed644124bb\") " pod="openstack-operators/openstack-operator-index-sk9th" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.606206 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfz2f\" (UniqueName: \"kubernetes.io/projected/2d45c10f-a82a-40de-ae81-b6ed644124bb-kube-api-access-bfz2f\") pod \"openstack-operator-index-sk9th\" (UID: \"2d45c10f-a82a-40de-ae81-b6ed644124bb\") " pod="openstack-operators/openstack-operator-index-sk9th" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.625150 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfz2f\" (UniqueName: \"kubernetes.io/projected/2d45c10f-a82a-40de-ae81-b6ed644124bb-kube-api-access-bfz2f\") pod \"openstack-operator-index-sk9th\" (UID: \"2d45c10f-a82a-40de-ae81-b6ed644124bb\") " pod="openstack-operators/openstack-operator-index-sk9th" Dec 05 07:03:33 crc kubenswrapper[4863]: I1205 07:03:33.724542 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sk9th" Dec 05 07:03:34 crc kubenswrapper[4863]: I1205 07:03:34.162661 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sk9th"] Dec 05 07:03:34 crc kubenswrapper[4863]: W1205 07:03:34.168153 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d45c10f_a82a_40de_ae81_b6ed644124bb.slice/crio-0644c4f2ce69b861bac3fffba7f2d415e7872fe3c0d3e608de3a74c8bb277967 WatchSource:0}: Error finding container 0644c4f2ce69b861bac3fffba7f2d415e7872fe3c0d3e608de3a74c8bb277967: Status 404 returned error can't find the container with id 0644c4f2ce69b861bac3fffba7f2d415e7872fe3c0d3e608de3a74c8bb277967 Dec 05 07:03:34 crc kubenswrapper[4863]: I1205 07:03:34.373340 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sk9th" event={"ID":"2d45c10f-a82a-40de-ae81-b6ed644124bb","Type":"ContainerStarted","Data":"0644c4f2ce69b861bac3fffba7f2d415e7872fe3c0d3e608de3a74c8bb277967"} Dec 05 07:03:36 crc kubenswrapper[4863]: I1205 07:03:36.774370 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sk9th"] Dec 05 07:03:37 crc kubenswrapper[4863]: I1205 07:03:37.390575 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-f2hhh"] Dec 05 07:03:37 crc kubenswrapper[4863]: I1205 07:03:37.391594 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:37 crc kubenswrapper[4863]: I1205 07:03:37.405194 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f2hhh"] Dec 05 07:03:37 crc kubenswrapper[4863]: I1205 07:03:37.471288 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9h6g\" (UniqueName: \"kubernetes.io/projected/0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0-kube-api-access-b9h6g\") pod \"openstack-operator-index-f2hhh\" (UID: \"0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0\") " pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:37 crc kubenswrapper[4863]: I1205 07:03:37.572098 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9h6g\" (UniqueName: \"kubernetes.io/projected/0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0-kube-api-access-b9h6g\") pod \"openstack-operator-index-f2hhh\" (UID: \"0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0\") " pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:37 crc kubenswrapper[4863]: I1205 07:03:37.608258 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9h6g\" (UniqueName: \"kubernetes.io/projected/0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0-kube-api-access-b9h6g\") pod \"openstack-operator-index-f2hhh\" (UID: \"0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0\") " pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:37 crc kubenswrapper[4863]: I1205 07:03:37.730863 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:38 crc kubenswrapper[4863]: I1205 07:03:38.156280 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f2hhh"] Dec 05 07:03:38 crc kubenswrapper[4863]: W1205 07:03:38.170957 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b62e8bc_0058_4440_a5ce_ab4c5a2f32d0.slice/crio-bac0f6dae9de0ebb28778b7a8bdd126bc7265422ad386e9ad2892300f7a1d4fb WatchSource:0}: Error finding container bac0f6dae9de0ebb28778b7a8bdd126bc7265422ad386e9ad2892300f7a1d4fb: Status 404 returned error can't find the container with id bac0f6dae9de0ebb28778b7a8bdd126bc7265422ad386e9ad2892300f7a1d4fb Dec 05 07:03:38 crc kubenswrapper[4863]: I1205 07:03:38.408834 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f2hhh" event={"ID":"0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0","Type":"ContainerStarted","Data":"bac0f6dae9de0ebb28778b7a8bdd126bc7265422ad386e9ad2892300f7a1d4fb"} Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.476617 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f2hhh" event={"ID":"0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0","Type":"ContainerStarted","Data":"7c332cdb3a9c0316c94ea65956fe06381990c0357111f6b638852f720e1cd117"} Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.479115 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sk9th" event={"ID":"2d45c10f-a82a-40de-ae81-b6ed644124bb","Type":"ContainerStarted","Data":"66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f"} Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.479357 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-sk9th" podUID="2d45c10f-a82a-40de-ae81-b6ed644124bb" containerName="registry-server" containerID="cri-o://66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f" gracePeriod=2 Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.504613 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-f2hhh" podStartSLOduration=2.162814275 podStartE2EDuration="9.504567136s" podCreationTimestamp="2025-12-05 07:03:37 +0000 UTC" firstStartedPulling="2025-12-05 07:03:38.173436352 +0000 UTC m=+1045.899433402" lastFinishedPulling="2025-12-05 07:03:45.515189213 +0000 UTC m=+1053.241186263" observedRunningTime="2025-12-05 07:03:46.495997949 +0000 UTC m=+1054.221994999" watchObservedRunningTime="2025-12-05 07:03:46.504567136 +0000 UTC m=+1054.230564186" Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.515524 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-sk9th" podStartSLOduration=2.412378856 podStartE2EDuration="13.51550207s" podCreationTimestamp="2025-12-05 07:03:33 +0000 UTC" firstStartedPulling="2025-12-05 07:03:34.170368203 +0000 UTC m=+1041.896365253" lastFinishedPulling="2025-12-05 07:03:45.273491417 +0000 UTC m=+1052.999488467" observedRunningTime="2025-12-05 07:03:46.511653577 +0000 UTC m=+1054.237650647" watchObservedRunningTime="2025-12-05 07:03:46.51550207 +0000 UTC m=+1054.241499120" Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.859924 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sk9th" Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.884129 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfz2f\" (UniqueName: \"kubernetes.io/projected/2d45c10f-a82a-40de-ae81-b6ed644124bb-kube-api-access-bfz2f\") pod \"2d45c10f-a82a-40de-ae81-b6ed644124bb\" (UID: \"2d45c10f-a82a-40de-ae81-b6ed644124bb\") " Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.889444 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d45c10f-a82a-40de-ae81-b6ed644124bb-kube-api-access-bfz2f" (OuterVolumeSpecName: "kube-api-access-bfz2f") pod "2d45c10f-a82a-40de-ae81-b6ed644124bb" (UID: "2d45c10f-a82a-40de-ae81-b6ed644124bb"). InnerVolumeSpecName "kube-api-access-bfz2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:03:46 crc kubenswrapper[4863]: I1205 07:03:46.987369 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfz2f\" (UniqueName: \"kubernetes.io/projected/2d45c10f-a82a-40de-ae81-b6ed644124bb-kube-api-access-bfz2f\") on node \"crc\" DevicePath \"\"" Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.488182 4863 generic.go:334] "Generic (PLEG): container finished" podID="2d45c10f-a82a-40de-ae81-b6ed644124bb" containerID="66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f" exitCode=0 Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.488259 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sk9th" Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.488278 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sk9th" event={"ID":"2d45c10f-a82a-40de-ae81-b6ed644124bb","Type":"ContainerDied","Data":"66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f"} Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.488364 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sk9th" event={"ID":"2d45c10f-a82a-40de-ae81-b6ed644124bb","Type":"ContainerDied","Data":"0644c4f2ce69b861bac3fffba7f2d415e7872fe3c0d3e608de3a74c8bb277967"} Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.488397 4863 scope.go:117] "RemoveContainer" containerID="66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f" Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.510518 4863 scope.go:117] "RemoveContainer" containerID="66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f" Dec 05 07:03:47 crc kubenswrapper[4863]: E1205 07:03:47.510999 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f\": container with ID starting with 66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f not found: ID does not exist" containerID="66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f" Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.511069 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f"} err="failed to get container status \"66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f\": rpc error: code = NotFound desc = could not find container 
\"66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f\": container with ID starting with 66f9d90e2c45bfd99bc043a866e964819dc526eab089349e5f6b77ff2728526f not found: ID does not exist" Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.522624 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sk9th"] Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.527854 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-sk9th"] Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.731667 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.731711 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:47 crc kubenswrapper[4863]: I1205 07:03:47.757918 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:03:48 crc kubenswrapper[4863]: I1205 07:03:48.611288 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d45c10f-a82a-40de-ae81-b6ed644124bb" path="/var/lib/kubelet/pods/2d45c10f-a82a-40de-ae81-b6ed644124bb/volumes" Dec 05 07:03:57 crc kubenswrapper[4863]: I1205 07:03:57.769732 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-f2hhh" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.532516 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb"] Dec 05 07:04:08 crc kubenswrapper[4863]: E1205 07:04:08.533204 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d45c10f-a82a-40de-ae81-b6ed644124bb" containerName="registry-server" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.533218 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d45c10f-a82a-40de-ae81-b6ed644124bb" containerName="registry-server" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.533385 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d45c10f-a82a-40de-ae81-b6ed644124bb" containerName="registry-server" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.536027 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.538733 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-5c24l" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.543114 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb"] Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.616708 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-util\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.616781 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-bundle\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.616882 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bggfx\" (UniqueName: \"kubernetes.io/projected/3c341744-ef7e-4887-9c70-e8143f784a9f-kube-api-access-bggfx\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.717349 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-util\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.717409 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-bundle\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.717439 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bggfx\" (UniqueName: \"kubernetes.io/projected/3c341744-ef7e-4887-9c70-e8143f784a9f-kube-api-access-bggfx\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.718142 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-util\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.718251 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-bundle\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.752530 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bggfx\" (UniqueName: \"kubernetes.io/projected/3c341744-ef7e-4887-9c70-e8143f784a9f-kube-api-access-bggfx\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:08 crc kubenswrapper[4863]: I1205 07:04:08.856822 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:09 crc kubenswrapper[4863]: I1205 07:04:09.327726 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb"] Dec 05 07:04:09 crc kubenswrapper[4863]: I1205 07:04:09.662295 4863 generic.go:334] "Generic (PLEG): container finished" podID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerID="d622c9555e67f49ae8fc3252b8c9b94fddc7122f214e448c680b7cde64e3ab45" exitCode=0 Dec 05 07:04:09 crc kubenswrapper[4863]: I1205 07:04:09.662676 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" event={"ID":"3c341744-ef7e-4887-9c70-e8143f784a9f","Type":"ContainerDied","Data":"d622c9555e67f49ae8fc3252b8c9b94fddc7122f214e448c680b7cde64e3ab45"} Dec 05 07:04:09 crc kubenswrapper[4863]: I1205 07:04:09.662711 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" event={"ID":"3c341744-ef7e-4887-9c70-e8143f784a9f","Type":"ContainerStarted","Data":"84b0b5c7096e3d0867302c5926b325098f41c6ceadcdf9e830bc5549afad2a6b"} Dec 05 07:04:09 crc kubenswrapper[4863]: I1205 07:04:09.664034 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:04:10 crc kubenswrapper[4863]: I1205 07:04:10.674315 4863 generic.go:334] "Generic (PLEG): container finished" podID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerID="d63df3fc0d8c0b993970b8fd6d1e4cc6261a4203e92e665bc1237017731c4fd5" exitCode=0 Dec 05 07:04:10 crc kubenswrapper[4863]: I1205 07:04:10.674402 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" event={"ID":"3c341744-ef7e-4887-9c70-e8143f784a9f","Type":"ContainerDied","Data":"d63df3fc0d8c0b993970b8fd6d1e4cc6261a4203e92e665bc1237017731c4fd5"} Dec 05 07:04:11 crc kubenswrapper[4863]: I1205 07:04:11.686651 4863 generic.go:334] "Generic (PLEG): container finished" 
podID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerID="5500da2b56b513fe4513057a65b039bb7021b39da9127ea08d96da04df496ce8" exitCode=0 Dec 05 07:04:11 crc kubenswrapper[4863]: I1205 07:04:11.686714 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" event={"ID":"3c341744-ef7e-4887-9c70-e8143f784a9f","Type":"ContainerDied","Data":"5500da2b56b513fe4513057a65b039bb7021b39da9127ea08d96da04df496ce8"} Dec 05 07:04:12 crc kubenswrapper[4863]: I1205 07:04:12.982032 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.178817 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bggfx\" (UniqueName: \"kubernetes.io/projected/3c341744-ef7e-4887-9c70-e8143f784a9f-kube-api-access-bggfx\") pod \"3c341744-ef7e-4887-9c70-e8143f784a9f\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.178979 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-bundle\") pod \"3c341744-ef7e-4887-9c70-e8143f784a9f\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.179015 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-util\") pod \"3c341744-ef7e-4887-9c70-e8143f784a9f\" (UID: \"3c341744-ef7e-4887-9c70-e8143f784a9f\") " Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.179872 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-bundle" (OuterVolumeSpecName: "bundle") pod "3c341744-ef7e-4887-9c70-e8143f784a9f" (UID: "3c341744-ef7e-4887-9c70-e8143f784a9f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.187360 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c341744-ef7e-4887-9c70-e8143f784a9f-kube-api-access-bggfx" (OuterVolumeSpecName: "kube-api-access-bggfx") pod "3c341744-ef7e-4887-9c70-e8143f784a9f" (UID: "3c341744-ef7e-4887-9c70-e8143f784a9f"). InnerVolumeSpecName "kube-api-access-bggfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.202308 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-util" (OuterVolumeSpecName: "util") pod "3c341744-ef7e-4887-9c70-e8143f784a9f" (UID: "3c341744-ef7e-4887-9c70-e8143f784a9f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.280879 4863 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.280925 4863 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c341744-ef7e-4887-9c70-e8143f784a9f-util\") on node \"crc\" DevicePath \"\"" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.280967 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bggfx\" (UniqueName: \"kubernetes.io/projected/3c341744-ef7e-4887-9c70-e8143f784a9f-kube-api-access-bggfx\") on node \"crc\" DevicePath \"\"" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.705770 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" event={"ID":"3c341744-ef7e-4887-9c70-e8143f784a9f","Type":"ContainerDied","Data":"84b0b5c7096e3d0867302c5926b325098f41c6ceadcdf9e830bc5549afad2a6b"} Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.705831 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84b0b5c7096e3d0867302c5926b325098f41c6ceadcdf9e830bc5549afad2a6b" Dec 05 07:04:13 crc kubenswrapper[4863]: I1205 07:04:13.705867 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.526747 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q"] Dec 05 07:04:20 crc kubenswrapper[4863]: E1205 07:04:20.527607 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerName="extract" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.527623 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerName="extract" Dec 05 07:04:20 crc kubenswrapper[4863]: E1205 07:04:20.527641 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerName="util" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.527648 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerName="util" Dec 05 07:04:20 crc kubenswrapper[4863]: E1205 07:04:20.527673 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerName="pull" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.527682 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerName="pull" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.527804 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c341744-ef7e-4887-9c70-e8143f784a9f" containerName="extract" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.528270 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.530306 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-wsrtl" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.552108 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q"] Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.684798 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2mzz\" (UniqueName: \"kubernetes.io/projected/9240a28f-333f-4619-b242-1a61f05d6aca-kube-api-access-g2mzz\") pod \"openstack-operator-controller-operator-55b6fb9447-wqs6q\" (UID: \"9240a28f-333f-4619-b242-1a61f05d6aca\") " pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.785923 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2mzz\" (UniqueName: \"kubernetes.io/projected/9240a28f-333f-4619-b242-1a61f05d6aca-kube-api-access-g2mzz\") pod \"openstack-operator-controller-operator-55b6fb9447-wqs6q\" (UID: \"9240a28f-333f-4619-b242-1a61f05d6aca\") " pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.815250 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2mzz\" (UniqueName: \"kubernetes.io/projected/9240a28f-333f-4619-b242-1a61f05d6aca-kube-api-access-g2mzz\") pod \"openstack-operator-controller-operator-55b6fb9447-wqs6q\" (UID: \"9240a28f-333f-4619-b242-1a61f05d6aca\") " pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" Dec 05 07:04:20 crc kubenswrapper[4863]: I1205 07:04:20.844821 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" Dec 05 07:04:21 crc kubenswrapper[4863]: I1205 07:04:21.307679 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q"] Dec 05 07:04:21 crc kubenswrapper[4863]: I1205 07:04:21.766791 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" event={"ID":"9240a28f-333f-4619-b242-1a61f05d6aca","Type":"ContainerStarted","Data":"915cf61048d077b284fff9d4be60baa0c9b34567cc9ec85dd9b9574fdcbee540"} Dec 05 07:04:26 crc kubenswrapper[4863]: I1205 07:04:26.824733 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" event={"ID":"9240a28f-333f-4619-b242-1a61f05d6aca","Type":"ContainerStarted","Data":"84822b9c65eb2070572a3e7df6580d175a669703911fe981dfcd4ca8f677ae0a"} Dec 05 07:04:26 crc kubenswrapper[4863]: I1205 07:04:26.825692 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" Dec 05 07:04:26 crc kubenswrapper[4863]: I1205 07:04:26.876941 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" podStartSLOduration=2.129401482 podStartE2EDuration="6.876926067s" podCreationTimestamp="2025-12-05 07:04:20 +0000 UTC" firstStartedPulling="2025-12-05 07:04:21.31185335 +0000 UTC m=+1089.037850410" lastFinishedPulling="2025-12-05 07:04:26.059377955 +0000 UTC m=+1093.785374995" observedRunningTime="2025-12-05 07:04:26.872774658 +0000 UTC m=+1094.598771698" watchObservedRunningTime="2025-12-05 07:04:26.876926067 +0000 UTC m=+1094.602923107" Dec 05 07:04:38 crc kubenswrapper[4863]: I1205 07:04:38.464030 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:04:38 crc kubenswrapper[4863]: I1205 07:04:38.465762 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:04:40 crc kubenswrapper[4863]: I1205 07:04:40.848321 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-wqs6q" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.071581 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.073692 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.075580 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-9hpxq" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.077156 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.078181 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.080007 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-h9857" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.081181 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.082103 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.083549 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-frrvh" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.102911 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.108949 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.149546 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.164547 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.165793 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.170276 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-d5rwl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.197026 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.203870 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.205050 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.214172 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-j2lr2" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.231186 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.231185 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x6tn\" (UniqueName: \"kubernetes.io/projected/cc74b559-6330-495d-b7a3-9582c1a0f935-kube-api-access-5x6tn\") pod \"cinder-operator-controller-manager-859b6ccc6-2dmqx\" (UID: \"cc74b559-6330-495d-b7a3-9582c1a0f935\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.231392 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgc6x\" (UniqueName: \"kubernetes.io/projected/0b841d1d-9d65-41e2-9db3-687320b8d75d-kube-api-access-mgc6x\") pod \"designate-operator-controller-manager-78b4bc895b-q6t65\" (UID: \"0b841d1d-9d65-41e2-9db3-687320b8d75d\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.231493 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28hqc\" (UniqueName: \"kubernetes.io/projected/fa841a8c-8a88-4134-977a-059db7a51e45-kube-api-access-28hqc\") pod \"barbican-operator-controller-manager-7d9dfd778-gs2j5\" (UID: \"fa841a8c-8a88-4134-977a-059db7a51e45\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.243889 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.244833 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.248677 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-hznfl"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.249589 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.250121 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-2hqfq" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.255864 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.256026 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-tct6p" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.258540 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.278401 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-hznfl"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.303394 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.304696 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.318493 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-wjd8p" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.327341 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.333002 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5qqn\" (UniqueName: \"kubernetes.io/projected/90a9e868-2b6b-4f2a-ba48-dd03855c47d9-kube-api-access-l5qqn\") pod \"glance-operator-controller-manager-77987cd8cd-jvwq8\" (UID: \"90a9e868-2b6b-4f2a-ba48-dd03855c47d9\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.333052 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x6tn\" (UniqueName: \"kubernetes.io/projected/cc74b559-6330-495d-b7a3-9582c1a0f935-kube-api-access-5x6tn\") pod \"cinder-operator-controller-manager-859b6ccc6-2dmqx\" (UID: \"cc74b559-6330-495d-b7a3-9582c1a0f935\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.333077 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgc6x\" (UniqueName: \"kubernetes.io/projected/0b841d1d-9d65-41e2-9db3-687320b8d75d-kube-api-access-mgc6x\") pod \"designate-operator-controller-manager-78b4bc895b-q6t65\" (UID: \"0b841d1d-9d65-41e2-9db3-687320b8d75d\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.333102 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28hqc\" (UniqueName: 
\"kubernetes.io/projected/fa841a8c-8a88-4134-977a-059db7a51e45-kube-api-access-28hqc\") pod \"barbican-operator-controller-manager-7d9dfd778-gs2j5\" (UID: \"fa841a8c-8a88-4134-977a-059db7a51e45\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.333127 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kl4r\" (UniqueName: \"kubernetes.io/projected/bc333aa4-954b-4d2d-8bba-8fec9631cecd-kube-api-access-7kl4r\") pod \"heat-operator-controller-manager-5f64f6f8bb-fm45v\" (UID: \"bc333aa4-954b-4d2d-8bba-8fec9631cecd\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.337531 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.338450 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.345110 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-gwsmb" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.354648 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.355637 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.358662 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.358981 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-hhf5r" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.368817 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgc6x\" (UniqueName: \"kubernetes.io/projected/0b841d1d-9d65-41e2-9db3-687320b8d75d-kube-api-access-mgc6x\") pod \"designate-operator-controller-manager-78b4bc895b-q6t65\" (UID: \"0b841d1d-9d65-41e2-9db3-687320b8d75d\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.370069 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x6tn\" (UniqueName: \"kubernetes.io/projected/cc74b559-6330-495d-b7a3-9582c1a0f935-kube-api-access-5x6tn\") pod \"cinder-operator-controller-manager-859b6ccc6-2dmqx\" (UID: \"cc74b559-6330-495d-b7a3-9582c1a0f935\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.372994 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28hqc\" (UniqueName: \"kubernetes.io/projected/fa841a8c-8a88-4134-977a-059db7a51e45-kube-api-access-28hqc\") pod \"barbican-operator-controller-manager-7d9dfd778-gs2j5\" (UID: \"fa841a8c-8a88-4134-977a-059db7a51e45\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" Dec 05 
07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.380498 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.395625 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.411173 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.420443 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.427297 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.432635 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434259 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whct6\" (UniqueName: \"kubernetes.io/projected/95da9d4b-d38a-4a40-8e9e-282b0b9da2ef-kube-api-access-whct6\") pod \"ironic-operator-controller-manager-6c548fd776-mhtdw\" (UID: \"95da9d4b-d38a-4a40-8e9e-282b0b9da2ef\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434314 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt2c5\" (UniqueName: \"kubernetes.io/projected/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-kube-api-access-lt2c5\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434348 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5qqn\" (UniqueName: \"kubernetes.io/projected/90a9e868-2b6b-4f2a-ba48-dd03855c47d9-kube-api-access-l5qqn\") pod \"glance-operator-controller-manager-77987cd8cd-jvwq8\" (UID: \"90a9e868-2b6b-4f2a-ba48-dd03855c47d9\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434392 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434430 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kl4r\" (UniqueName: \"kubernetes.io/projected/bc333aa4-954b-4d2d-8bba-8fec9631cecd-kube-api-access-7kl4r\") pod \"heat-operator-controller-manager-5f64f6f8bb-fm45v\" (UID: \"bc333aa4-954b-4d2d-8bba-8fec9631cecd\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" Dec 05 
07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434456 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82wbw\" (UniqueName: \"kubernetes.io/projected/d7017ca8-d0ed-4245-8786-be169d9dde3a-kube-api-access-82wbw\") pod \"manila-operator-controller-manager-7c79b5df47-bq7lw\" (UID: \"d7017ca8-d0ed-4245-8786-be169d9dde3a\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434503 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27nk7\" (UniqueName: \"kubernetes.io/projected/84668b75-595c-4382-bde5-a7561c200d50-kube-api-access-27nk7\") pod \"horizon-operator-controller-manager-68c6d99b8f-wsxg9\" (UID: \"84668b75-595c-4382-bde5-a7561c200d50\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.434822 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.445773 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-tcq58" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.446095 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.446221 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-xrdb2" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.455345 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.477457 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.480136 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5qqn\" (UniqueName: \"kubernetes.io/projected/90a9e868-2b6b-4f2a-ba48-dd03855c47d9-kube-api-access-l5qqn\") pod \"glance-operator-controller-manager-77987cd8cd-jvwq8\" (UID: \"90a9e868-2b6b-4f2a-ba48-dd03855c47d9\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.496119 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.497342 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.498055 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.498205 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.502411 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-lr784" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.512164 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-5nqmf" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.512736 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539218 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt2c5\" (UniqueName: \"kubernetes.io/projected/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-kube-api-access-lt2c5\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539262 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5b9l\" (UniqueName: \"kubernetes.io/projected/47454469-2c4d-4cbd-befa-eb137b5d4a1e-kube-api-access-n5b9l\") pod \"keystone-operator-controller-manager-7765d96ddf-l4gp5\" (UID: \"47454469-2c4d-4cbd-befa-eb137b5d4a1e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539300 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78xhs\" (UniqueName: \"kubernetes.io/projected/ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7-kube-api-access-78xhs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-dz59p\" (UID: \"ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539321 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539349 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v54fh\" (UniqueName: \"kubernetes.io/projected/dd03c712-5c00-447e-a266-4dfe71e3938a-kube-api-access-v54fh\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hwzqx\" (UID: \"dd03c712-5c00-447e-a266-4dfe71e3938a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539360 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kl4r\" (UniqueName: \"kubernetes.io/projected/bc333aa4-954b-4d2d-8bba-8fec9631cecd-kube-api-access-7kl4r\") pod \"heat-operator-controller-manager-5f64f6f8bb-fm45v\" (UID: \"bc333aa4-954b-4d2d-8bba-8fec9631cecd\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539379 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82wbw\" (UniqueName: \"kubernetes.io/projected/d7017ca8-d0ed-4245-8786-be169d9dde3a-kube-api-access-82wbw\") pod \"manila-operator-controller-manager-7c79b5df47-bq7lw\" (UID: \"d7017ca8-d0ed-4245-8786-be169d9dde3a\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539401 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27nk7\" (UniqueName: \"kubernetes.io/projected/84668b75-595c-4382-bde5-a7561c200d50-kube-api-access-27nk7\") pod \"horizon-operator-controller-manager-68c6d99b8f-wsxg9\" (UID: \"84668b75-595c-4382-bde5-a7561c200d50\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.539483 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whct6\" (UniqueName: \"kubernetes.io/projected/95da9d4b-d38a-4a40-8e9e-282b0b9da2ef-kube-api-access-whct6\") pod \"ironic-operator-controller-manager-6c548fd776-mhtdw\" (UID: \"95da9d4b-d38a-4a40-8e9e-282b0b9da2ef\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" Dec 05 07:05:00 crc kubenswrapper[4863]: E1205 07:05:00.539774 4863 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:00 crc kubenswrapper[4863]: E1205 07:05:00.539820 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert podName:9cd0a9a8-0623-45f9-84e4-cdc1a6132909 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:01.039803526 +0000 UTC m=+1128.765800566 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert") pod "infra-operator-controller-manager-57548d458d-hznfl" (UID: "9cd0a9a8-0623-45f9-84e4-cdc1a6132909") : secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.540325 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.552910 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.552962 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.554142 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.561406 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.561765 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-zhsb4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.563275 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.602714 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-8cx62" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.638109 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82wbw\" (UniqueName: \"kubernetes.io/projected/d7017ca8-d0ed-4245-8786-be169d9dde3a-kube-api-access-82wbw\") pod \"manila-operator-controller-manager-7c79b5df47-bq7lw\" (UID: \"d7017ca8-d0ed-4245-8786-be169d9dde3a\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.640598 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjwbh\" (UniqueName: \"kubernetes.io/projected/905df848-1d74-4ab5-b9a1-4660b651930b-kube-api-access-vjwbh\") pod \"octavia-operator-controller-manager-998648c74-pcpd5\" (UID: \"905df848-1d74-4ab5-b9a1-4660b651930b\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.640630 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9z9v\" (UniqueName: \"kubernetes.io/projected/b850a070-2bf4-4163-9e18-0315e1f0b250-kube-api-access-l9z9v\") pod \"nova-operator-controller-manager-697bc559fc-kk8c5\" (UID: \"b850a070-2bf4-4163-9e18-0315e1f0b250\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.640690 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5b9l\" (UniqueName: \"kubernetes.io/projected/47454469-2c4d-4cbd-befa-eb137b5d4a1e-kube-api-access-n5b9l\") pod \"keystone-operator-controller-manager-7765d96ddf-l4gp5\" (UID: \"47454469-2c4d-4cbd-befa-eb137b5d4a1e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.640728 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78xhs\" (UniqueName: \"kubernetes.io/projected/ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7-kube-api-access-78xhs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-dz59p\" (UID: \"ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.640778 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v54fh\" (UniqueName: \"kubernetes.io/projected/dd03c712-5c00-447e-a266-4dfe71e3938a-kube-api-access-v54fh\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hwzqx\" (UID: \"dd03c712-5c00-447e-a266-4dfe71e3938a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.643857 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27nk7\" (UniqueName: \"kubernetes.io/projected/84668b75-595c-4382-bde5-a7561c200d50-kube-api-access-27nk7\") pod \"horizon-operator-controller-manager-68c6d99b8f-wsxg9\" (UID: \"84668b75-595c-4382-bde5-a7561c200d50\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" 
Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.650272 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt2c5\" (UniqueName: \"kubernetes.io/projected/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-kube-api-access-lt2c5\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.650373 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whct6\" (UniqueName: \"kubernetes.io/projected/95da9d4b-d38a-4a40-8e9e-282b0b9da2ef-kube-api-access-whct6\") pod \"ironic-operator-controller-manager-6c548fd776-mhtdw\" (UID: \"95da9d4b-d38a-4a40-8e9e-282b0b9da2ef\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.694792 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.707671 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v54fh\" (UniqueName: \"kubernetes.io/projected/dd03c712-5c00-447e-a266-4dfe71e3938a-kube-api-access-v54fh\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hwzqx\" (UID: \"dd03c712-5c00-447e-a266-4dfe71e3938a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.723536 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5b9l\" (UniqueName: \"kubernetes.io/projected/47454469-2c4d-4cbd-befa-eb137b5d4a1e-kube-api-access-n5b9l\") pod \"keystone-operator-controller-manager-7765d96ddf-l4gp5\" (UID: \"47454469-2c4d-4cbd-befa-eb137b5d4a1e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.731519 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78xhs\" (UniqueName: \"kubernetes.io/projected/ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7-kube-api-access-78xhs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-dz59p\" (UID: \"ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.742225 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h548\" (UniqueName: \"kubernetes.io/projected/429debdd-9c1f-4af0-905e-899a846b5711-kube-api-access-8h548\") pod \"ovn-operator-controller-manager-b6456fdb6-jk9p4\" (UID: \"429debdd-9c1f-4af0-905e-899a846b5711\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.742287 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg4fn\" (UniqueName: \"kubernetes.io/projected/aa38ed5a-a0a4-4a7d-9220-6d093163bb5b-kube-api-access-lg4fn\") pod \"placement-operator-controller-manager-78f8948974-vbbf4\" (UID: \"aa38ed5a-a0a4-4a7d-9220-6d093163bb5b\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.742346 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vjwbh\" (UniqueName: \"kubernetes.io/projected/905df848-1d74-4ab5-b9a1-4660b651930b-kube-api-access-vjwbh\") pod \"octavia-operator-controller-manager-998648c74-pcpd5\" (UID: \"905df848-1d74-4ab5-b9a1-4660b651930b\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.742365 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9z9v\" (UniqueName: \"kubernetes.io/projected/b850a070-2bf4-4163-9e18-0315e1f0b250-kube-api-access-l9z9v\") pod \"nova-operator-controller-manager-697bc559fc-kk8c5\" (UID: \"b850a070-2bf4-4163-9e18-0315e1f0b250\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.744337 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.744371 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.745321 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.757770 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-f9rp7" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.762890 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.763332 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.785454 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.790488 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjwbh\" (UniqueName: \"kubernetes.io/projected/905df848-1d74-4ab5-b9a1-4660b651930b-kube-api-access-vjwbh\") pod \"octavia-operator-controller-manager-998648c74-pcpd5\" (UID: \"905df848-1d74-4ab5-b9a1-4660b651930b\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.791093 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.801815 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.806526 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.810424 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9z9v\" (UniqueName: \"kubernetes.io/projected/b850a070-2bf4-4163-9e18-0315e1f0b250-kube-api-access-l9z9v\") pod \"nova-operator-controller-manager-697bc559fc-kk8c5\" (UID: \"b850a070-2bf4-4163-9e18-0315e1f0b250\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.813056 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.813732 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.822141 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.830417 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-27twb" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.843448 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfzkp\" (UniqueName: \"kubernetes.io/projected/3830a1fc-64ea-4860-9324-9f71dba749f3-kube-api-access-lfzkp\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.843550 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h548\" (UniqueName: \"kubernetes.io/projected/429debdd-9c1f-4af0-905e-899a846b5711-kube-api-access-8h548\") pod \"ovn-operator-controller-manager-b6456fdb6-jk9p4\" (UID: \"429debdd-9c1f-4af0-905e-899a846b5711\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.843592 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.843611 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg4fn\" (UniqueName: \"kubernetes.io/projected/aa38ed5a-a0a4-4a7d-9220-6d093163bb5b-kube-api-access-lg4fn\") pod \"placement-operator-controller-manager-78f8948974-vbbf4\" (UID: \"aa38ed5a-a0a4-4a7d-9220-6d093163bb5b\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.849969 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.853196 4863 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.868215 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.879053 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.880021 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.891856 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.897097 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h548\" (UniqueName: \"kubernetes.io/projected/429debdd-9c1f-4af0-905e-899a846b5711-kube-api-access-8h548\") pod \"ovn-operator-controller-manager-b6456fdb6-jk9p4\" (UID: \"429debdd-9c1f-4af0-905e-899a846b5711\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.904745 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.906879 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-kdslf" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.914244 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.926307 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg4fn\" (UniqueName: \"kubernetes.io/projected/aa38ed5a-a0a4-4a7d-9220-6d093163bb5b-kube-api-access-lg4fn\") pod \"placement-operator-controller-manager-78f8948974-vbbf4\" (UID: \"aa38ed5a-a0a4-4a7d-9220-6d093163bb5b\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.935754 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.944408 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfzkp\" (UniqueName: \"kubernetes.io/projected/3830a1fc-64ea-4860-9324-9f71dba749f3-kube-api-access-lfzkp\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.944465 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cptlv\" (UniqueName: \"kubernetes.io/projected/e17b5c94-e94d-4102-8000-fa7d3c939caf-kube-api-access-cptlv\") pod \"test-operator-controller-manager-5854674fcc-2wdhl\" (UID: \"e17b5c94-e94d-4102-8000-fa7d3c939caf\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.944560 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.944585 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kdhd\" (UniqueName: \"kubernetes.io/projected/5e458dfe-ff58-4c00-b65d-69d634abf798-kube-api-access-6kdhd\") pod \"swift-operator-controller-manager-5f8c65bbfc-pr9bb\" (UID: \"5e458dfe-ff58-4c00-b65d-69d634abf798\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.944610 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rt7rs\" (UniqueName: \"kubernetes.io/projected/b683a8d2-9503-4660-8a70-d28bc5b9f75b-kube-api-access-rt7rs\") pod \"telemetry-operator-controller-manager-76cc84c6bb-rgdk8\" (UID: \"b683a8d2-9503-4660-8a70-d28bc5b9f75b\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" Dec 05 07:05:00 crc kubenswrapper[4863]: E1205 07:05:00.945058 4863 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:00 crc kubenswrapper[4863]: E1205 07:05:00.945094 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert podName:3830a1fc-64ea-4860-9324-9f71dba749f3 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:01.445082342 +0000 UTC m=+1129.171079382 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" (UID: "3830a1fc-64ea-4860-9324-9f71dba749f3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.948729 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-r4bwk" Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.956835 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl"] Dec 05 07:05:00 crc kubenswrapper[4863]: I1205 07:05:00.986644 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfzkp\" (UniqueName: \"kubernetes.io/projected/3830a1fc-64ea-4860-9324-9f71dba749f3-kube-api-access-lfzkp\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.006811 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.007938 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.015350 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.015650 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.016120 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-qrg9s" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.046275 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.046704 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cptlv\" (UniqueName: \"kubernetes.io/projected/e17b5c94-e94d-4102-8000-fa7d3c939caf-kube-api-access-cptlv\") pod \"test-operator-controller-manager-5854674fcc-2wdhl\" (UID: \"e17b5c94-e94d-4102-8000-fa7d3c939caf\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.046762 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.046805 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kdhd\" (UniqueName: \"kubernetes.io/projected/5e458dfe-ff58-4c00-b65d-69d634abf798-kube-api-access-6kdhd\") pod \"swift-operator-controller-manager-5f8c65bbfc-pr9bb\" (UID: \"5e458dfe-ff58-4c00-b65d-69d634abf798\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.046834 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rt7rs\" (UniqueName: \"kubernetes.io/projected/b683a8d2-9503-4660-8a70-d28bc5b9f75b-kube-api-access-rt7rs\") pod \"telemetry-operator-controller-manager-76cc84c6bb-rgdk8\" (UID: \"b683a8d2-9503-4660-8a70-d28bc5b9f75b\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.047304 4863 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.047350 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert podName:9cd0a9a8-0623-45f9-84e4-cdc1a6132909 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:02.047336668 +0000 UTC m=+1129.773333708 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert") pod "infra-operator-controller-manager-57548d458d-hznfl" (UID: "9cd0a9a8-0623-45f9-84e4-cdc1a6132909") : secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.068813 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.069964 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.072220 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.072385 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-nvw8w" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.076765 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kdhd\" (UniqueName: \"kubernetes.io/projected/5e458dfe-ff58-4c00-b65d-69d634abf798-kube-api-access-6kdhd\") pod \"swift-operator-controller-manager-5f8c65bbfc-pr9bb\" (UID: \"5e458dfe-ff58-4c00-b65d-69d634abf798\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.080499 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.082325 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.086819 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.095810 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cptlv\" (UniqueName: \"kubernetes.io/projected/e17b5c94-e94d-4102-8000-fa7d3c939caf-kube-api-access-cptlv\") pod \"test-operator-controller-manager-5854674fcc-2wdhl\" (UID: \"e17b5c94-e94d-4102-8000-fa7d3c939caf\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.095903 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rt7rs\" (UniqueName: \"kubernetes.io/projected/b683a8d2-9503-4660-8a70-d28bc5b9f75b-kube-api-access-rt7rs\") pod \"telemetry-operator-controller-manager-76cc84c6bb-rgdk8\" (UID: \"b683a8d2-9503-4660-8a70-d28bc5b9f75b\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.141356 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.143647 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.146317 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-gl27t" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.147755 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.147784 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mpnb\" (UniqueName: \"kubernetes.io/projected/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-kube-api-access-9mpnb\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.147832 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkvs5\" (UniqueName: \"kubernetes.io/projected/0c7adcb5-dc00-4705-b753-0c695c580367-kube-api-access-wkvs5\") pod \"watcher-operator-controller-manager-769dc69bc-rqgb7\" (UID: \"0c7adcb5-dc00-4705-b753-0c695c580367\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.147880 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.153650 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.155163 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.214867 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.218348 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.232399 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.249871 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kws7d\" (UniqueName: \"kubernetes.io/projected/6cba313f-ee15-470e-8dcb-8251b6bfb52a-kube-api-access-kws7d\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hbld6\" (UID: \"6cba313f-ee15-470e-8dcb-8251b6bfb52a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.249929 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.249964 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mpnb\" (UniqueName: \"kubernetes.io/projected/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-kube-api-access-9mpnb\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.250028 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkvs5\" (UniqueName: \"kubernetes.io/projected/0c7adcb5-dc00-4705-b753-0c695c580367-kube-api-access-wkvs5\") pod \"watcher-operator-controller-manager-769dc69bc-rqgb7\" (UID: \"0c7adcb5-dc00-4705-b753-0c695c580367\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.250094 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.250256 4863 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.250316 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:01.750297914 +0000 UTC m=+1129.476294954 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.250610 4863 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.250641 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:01.750631673 +0000 UTC m=+1129.476628713 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "metrics-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.255776 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.282567 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkvs5\" (UniqueName: \"kubernetes.io/projected/0c7adcb5-dc00-4705-b753-0c695c580367-kube-api-access-wkvs5\") pod \"watcher-operator-controller-manager-769dc69bc-rqgb7\" (UID: \"0c7adcb5-dc00-4705-b753-0c695c580367\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.289093 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mpnb\" (UniqueName: \"kubernetes.io/projected/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-kube-api-access-9mpnb\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.352544 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kws7d\" (UniqueName: \"kubernetes.io/projected/6cba313f-ee15-470e-8dcb-8251b6bfb52a-kube-api-access-kws7d\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hbld6\" (UID: \"6cba313f-ee15-470e-8dcb-8251b6bfb52a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.363364 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.379196 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.387355 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kws7d\" (UniqueName: \"kubernetes.io/projected/6cba313f-ee15-470e-8dcb-8251b6bfb52a-kube-api-access-kws7d\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hbld6\" (UID: \"6cba313f-ee15-470e-8dcb-8251b6bfb52a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.421706 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.459679 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.460274 4863 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.460334 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert podName:3830a1fc-64ea-4860-9324-9f71dba749f3 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:02.460313681 +0000 UTC m=+1130.186310721 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" (UID: "3830a1fc-64ea-4860-9324-9f71dba749f3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.470652 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.532884 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.596051 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p"] Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.603116 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v"] Dec 05 07:05:01 crc kubenswrapper[4863]: W1205 07:05:01.703296 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebcc8b51_cd3e_4ce3_9b4b_6879f22efef7.slice/crio-cf447b148f79fd625ffa80bdc20d96953954f241ade7bb452b9eeea77c310c76 WatchSource:0}: Error finding container cf447b148f79fd625ffa80bdc20d96953954f241ade7bb452b9eeea77c310c76: Status 404 returned error can't find the container with id cf447b148f79fd625ffa80bdc20d96953954f241ade7bb452b9eeea77c310c76 Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.766728 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.766874 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.767069 4863 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.767137 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:02.767120563 +0000 UTC m=+1130.493117603 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "webhook-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.767346 4863 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: E1205 07:05:01.767638 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:02.767621445 +0000 UTC m=+1130.493618485 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "metrics-server-cert" not found Dec 05 07:05:01 crc kubenswrapper[4863]: I1205 07:05:01.965313 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.070384 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.070520 4863 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.070575 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert podName:9cd0a9a8-0623-45f9-84e4-cdc1a6132909 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:04.070561772 +0000 UTC m=+1131.796558812 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert") pod "infra-operator-controller-manager-57548d458d-hznfl" (UID: "9cd0a9a8-0623-45f9-84e4-cdc1a6132909") : secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.112153 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" event={"ID":"bc333aa4-954b-4d2d-8bba-8fec9631cecd","Type":"ContainerStarted","Data":"02cce0ada58931d3e37a7dea3b9e8e407aa91c13aa5aa9a50281e39ee19acbe4"} Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.113016 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.113680 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" event={"ID":"d7017ca8-d0ed-4245-8786-be169d9dde3a","Type":"ContainerStarted","Data":"12cd452d0e24841ad8b9c49421f62f8fe69668894721f50117c813d2771687d5"} Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.118059 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.118966 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" event={"ID":"ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7","Type":"ContainerStarted","Data":"cf447b148f79fd625ffa80bdc20d96953954f241ade7bb452b9eeea77c310c76"} Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.121860 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.130519 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" event={"ID":"90a9e868-2b6b-4f2a-ba48-dd03855c47d9","Type":"ContainerStarted","Data":"e4216fa5fde61d194fe56208dd0c40a7d42429c92fc738a83b3a41ff18d7fd73"} Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.135159 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" event={"ID":"47454469-2c4d-4cbd-befa-eb137b5d4a1e","Type":"ContainerStarted","Data":"24af0cf88fadfc0573176d09231c12a2179879951c81f5ce6deb9420fdb2cac1"} Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.136171 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.145482 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" event={"ID":"cc74b559-6330-495d-b7a3-9582c1a0f935","Type":"ContainerStarted","Data":"97ddc6ca15f3094c220b2988df0836afe2b3ac502a7da1eabef41c3bdbd69b09"} Dec 05 07:05:02 crc kubenswrapper[4863]: W1205 07:05:02.157159 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa38ed5a_a0a4_4a7d_9220_6d093163bb5b.slice/crio-0528737228fbbca8739f0c64f6127480b706f90297af3f209750df365f0c0b94 WatchSource:0}: Error finding container 0528737228fbbca8739f0c64f6127480b706f90297af3f209750df365f0c0b94: Status 404 returned error can't find the container with id 0528737228fbbca8739f0c64f6127480b706f90297af3f209750df365f0c0b94 Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.172406 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" event={"ID":"fa841a8c-8a88-4134-977a-059db7a51e45","Type":"ContainerStarted","Data":"f767a3429a60cb4e3574a7b2e074d3211dffab9d729fb9bec606640c74b9bb89"} Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.173961 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" event={"ID":"0b841d1d-9d65-41e2-9db3-687320b8d75d","Type":"ContainerStarted","Data":"ad02fe09cbbed828fadf83981a1fa1984d2d2673d94c973ee1dbe212fe750dab"} Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.299445 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.315661 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.325582 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4"] Dec 05 07:05:02 crc kubenswrapper[4863]: W1205 07:05:02.339164 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb850a070_2bf4_4163_9e18_0315e1f0b250.slice/crio-ca1cb44029a65698885a9481facab4a55144518da4ea4f2138169bba636add17 WatchSource:0}: Error finding container ca1cb44029a65698885a9481facab4a55144518da4ea4f2138169bba636add17: Status 404 returned error can't find the container with id ca1cb44029a65698885a9481facab4a55144518da4ea4f2138169bba636add17 Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.344050 
4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb"] Dec 05 07:05:02 crc kubenswrapper[4863]: W1205 07:05:02.344587 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod429debdd_9c1f_4af0_905e_899a846b5711.slice/crio-acb17c35babb13db437f41c4880d236309062b354c6a3bb0e06ddadff02e9b1b WatchSource:0}: Error finding container acb17c35babb13db437f41c4880d236309062b354c6a3bb0e06ddadff02e9b1b: Status 404 returned error can't find the container with id acb17c35babb13db437f41c4880d236309062b354c6a3bb0e06ddadff02e9b1b Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.353626 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8h548,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-jk9p4_openstack-operators(429debdd-9c1f-4af0-905e-899a846b5711): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.355352 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8h548,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-jk9p4_openstack-operators(429debdd-9c1f-4af0-905e-899a846b5711): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.356442 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" podUID="429debdd-9c1f-4af0-905e-899a846b5711" Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.430007 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.434974 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7"] Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.445260 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wkvs5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-rqgb7_openstack-operators(0c7adcb5-dc00-4705-b753-0c695c580367): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.445911 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kws7d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-hbld6_openstack-operators(6cba313f-ee15-470e-8dcb-8251b6bfb52a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.447554 4863 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" podUID="6cba313f-ee15-470e-8dcb-8251b6bfb52a" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.451274 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wkvs5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-rqgb7_openstack-operators(0c7adcb5-dc00-4705-b753-0c695c580367): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.452378 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl"] Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.452438 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" podUID="0c7adcb5-dc00-4705-b753-0c695c580367" Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.457719 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8"] Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.475945 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.476157 4863 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.476201 4863 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert podName:3830a1fc-64ea-4860-9324-9f71dba749f3 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:04.476189197 +0000 UTC m=+1132.202186237 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" (UID: "3830a1fc-64ea-4860-9324-9f71dba749f3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.478714 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cptlv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-2wdhl_openstack-operators(e17b5c94-e94d-4102-8000-fa7d3c939caf): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.485927 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cptlv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-2wdhl_openstack-operators(e17b5c94-e94d-4102-8000-fa7d3c939caf): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.487066 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" podUID="e17b5c94-e94d-4102-8000-fa7d3c939caf" Dec 05 07:05:02 crc kubenswrapper[4863]: W1205 07:05:02.491752 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb683a8d2_9503_4660_8a70_d28bc5b9f75b.slice/crio-a80d581bd9752804df98433417a86753f237aa2ef1e396a854b35986d018e4d8 WatchSource:0}: Error finding container a80d581bd9752804df98433417a86753f237aa2ef1e396a854b35986d018e4d8: Status 404 returned error can't find the container with id a80d581bd9752804df98433417a86753f237aa2ef1e396a854b35986d018e4d8 Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.785739 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:02 crc kubenswrapper[4863]: I1205 07:05:02.785843 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.785998 4863 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.786046 4863 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:04.786032151 +0000 UTC m=+1132.512029191 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "metrics-server-cert" not found Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.786804 4863 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 07:05:02 crc kubenswrapper[4863]: E1205 07:05:02.786842 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:04.786832641 +0000 UTC m=+1132.512829681 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "webhook-server-cert" not found Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.192952 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" event={"ID":"5e458dfe-ff58-4c00-b65d-69d634abf798","Type":"ContainerStarted","Data":"13be60aa601ed96a77e0bbc77b923199477f704b47f6fd1c7eac43aaabd631ed"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.197140 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" event={"ID":"b850a070-2bf4-4163-9e18-0315e1f0b250","Type":"ContainerStarted","Data":"ca1cb44029a65698885a9481facab4a55144518da4ea4f2138169bba636add17"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.200117 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" event={"ID":"905df848-1d74-4ab5-b9a1-4660b651930b","Type":"ContainerStarted","Data":"19c31864e53bf56fc9b48583c42b2c105f5c696c0167d90c38382159ed18548c"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.202667 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" event={"ID":"95da9d4b-d38a-4a40-8e9e-282b0b9da2ef","Type":"ContainerStarted","Data":"30122c451f0c4c503f21c0b405c23cd9720b9dbac3357c7600fc9663904ef809"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.204345 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" event={"ID":"0c7adcb5-dc00-4705-b753-0c695c580367","Type":"ContainerStarted","Data":"ad68ebf39afa849f26df51e096d6bf80697015fa439a3b8fd22af1513f2529ca"} Dec 05 07:05:03 crc kubenswrapper[4863]: E1205 07:05:03.215206 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" podUID="0c7adcb5-dc00-4705-b753-0c695c580367" Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.222745 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" event={"ID":"b683a8d2-9503-4660-8a70-d28bc5b9f75b","Type":"ContainerStarted","Data":"a80d581bd9752804df98433417a86753f237aa2ef1e396a854b35986d018e4d8"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.243303 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" event={"ID":"aa38ed5a-a0a4-4a7d-9220-6d093163bb5b","Type":"ContainerStarted","Data":"0528737228fbbca8739f0c64f6127480b706f90297af3f209750df365f0c0b94"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.247956 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" event={"ID":"84668b75-595c-4382-bde5-a7561c200d50","Type":"ContainerStarted","Data":"7a074a809defd9784287144d826459d60b0f97a09c67b3bba74e16e447e25d14"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.256485 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" event={"ID":"dd03c712-5c00-447e-a266-4dfe71e3938a","Type":"ContainerStarted","Data":"7760988aad20d7ec9b48d1ab5f4eac18d8c521887cd61b1537df3938a5a0faa8"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.262566 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" event={"ID":"6cba313f-ee15-470e-8dcb-8251b6bfb52a","Type":"ContainerStarted","Data":"82606d4f36ea114ebeaab91f66dd48560b610481f274adc507d3fbc72afea3af"} Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.264586 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" event={"ID":"429debdd-9c1f-4af0-905e-899a846b5711","Type":"ContainerStarted","Data":"acb17c35babb13db437f41c4880d236309062b354c6a3bb0e06ddadff02e9b1b"} Dec 05 07:05:03 crc kubenswrapper[4863]: E1205 07:05:03.265965 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" podUID="6cba313f-ee15-470e-8dcb-8251b6bfb52a" Dec 05 07:05:03 crc kubenswrapper[4863]: E1205 07:05:03.269067 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" podUID="429debdd-9c1f-4af0-905e-899a846b5711" Dec 05 07:05:03 crc kubenswrapper[4863]: I1205 07:05:03.269969 4863 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" event={"ID":"e17b5c94-e94d-4102-8000-fa7d3c939caf","Type":"ContainerStarted","Data":"bf76f0280356319d194828c9e9dbeca618dc87c2cf71b41c08438487824c8741"} Dec 05 07:05:03 crc kubenswrapper[4863]: E1205 07:05:03.272669 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" podUID="e17b5c94-e94d-4102-8000-fa7d3c939caf" Dec 05 07:05:04 crc kubenswrapper[4863]: I1205 07:05:04.119842 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.120059 4863 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.120111 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert podName:9cd0a9a8-0623-45f9-84e4-cdc1a6132909 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:08.120094393 +0000 UTC m=+1135.846091443 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert") pod "infra-operator-controller-manager-57548d458d-hznfl" (UID: "9cd0a9a8-0623-45f9-84e4-cdc1a6132909") : secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.278933 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" podUID="6cba313f-ee15-470e-8dcb-8251b6bfb52a" Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.281664 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" podUID="e17b5c94-e94d-4102-8000-fa7d3c939caf" Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.282033 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" podUID="0c7adcb5-dc00-4705-b753-0c695c580367" Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.282097 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" podUID="429debdd-9c1f-4af0-905e-899a846b5711" Dec 05 07:05:04 crc kubenswrapper[4863]: I1205 07:05:04.525244 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.525449 4863 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.525510 4863 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert podName:3830a1fc-64ea-4860-9324-9f71dba749f3 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:08.525496003 +0000 UTC m=+1136.251493033 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" (UID: "3830a1fc-64ea-4860-9324-9f71dba749f3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:04 crc kubenswrapper[4863]: I1205 07:05:04.831453 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:04 crc kubenswrapper[4863]: I1205 07:05:04.832062 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.831583 4863 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.832426 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:08.832391176 +0000 UTC m=+1136.558388216 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "webhook-server-cert" not found Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.832312 4863 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 07:05:04 crc kubenswrapper[4863]: E1205 07:05:04.832916 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:08.832906918 +0000 UTC m=+1136.558903958 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "metrics-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: I1205 07:05:08.178277 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.178424 4863 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.178746 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert podName:9cd0a9a8-0623-45f9-84e4-cdc1a6132909 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:16.178725907 +0000 UTC m=+1143.904722947 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert") pod "infra-operator-controller-manager-57548d458d-hznfl" (UID: "9cd0a9a8-0623-45f9-84e4-cdc1a6132909") : secret "infra-operator-webhook-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: I1205 07:05:08.465702 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:05:08 crc kubenswrapper[4863]: I1205 07:05:08.465778 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:05:08 crc kubenswrapper[4863]: I1205 07:05:08.584237 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.584423 4863 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.584538 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert podName:3830a1fc-64ea-4860-9324-9f71dba749f3 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:16.584517525 +0000 UTC m=+1144.310514565 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" (UID: "3830a1fc-64ea-4860-9324-9f71dba749f3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: I1205 07:05:08.889600 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:08 crc kubenswrapper[4863]: I1205 07:05:08.889721 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.889806 4863 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.889876 4863 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.889880 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:16.889861061 +0000 UTC m=+1144.615858111 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "metrics-server-cert" not found Dec 05 07:05:08 crc kubenswrapper[4863]: E1205 07:05:08.889942 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs podName:4b87a8eb-ebb1-450b-bc8c-cd307695f1c4 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:16.889928072 +0000 UTC m=+1144.615925112 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-thxkr" (UID: "4b87a8eb-ebb1-450b-bc8c-cd307695f1c4") : secret "webhook-server-cert" not found Dec 05 07:05:15 crc kubenswrapper[4863]: E1205 07:05:15.174534 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809" Dec 05 07:05:15 crc kubenswrapper[4863]: E1205 07:05:15.175209 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l5qqn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-jvwq8_openstack-operators(90a9e868-2b6b-4f2a-ba48-dd03855c47d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.206381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.219055 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9cd0a9a8-0623-45f9-84e4-cdc1a6132909-cert\") pod \"infra-operator-controller-manager-57548d458d-hznfl\" (UID: \"9cd0a9a8-0623-45f9-84e4-cdc1a6132909\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.479832 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-tct6p" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.487573 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.612505 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:16 crc kubenswrapper[4863]: E1205 07:05:16.612776 4863 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:16 crc kubenswrapper[4863]: E1205 07:05:16.612888 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert podName:3830a1fc-64ea-4860-9324-9f71dba749f3 nodeName:}" failed. No retries permitted until 2025-12-05 07:05:32.612856288 +0000 UTC m=+1160.338853368 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" (UID: "3830a1fc-64ea-4860-9324-9f71dba749f3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 07:05:16 crc kubenswrapper[4863]: E1205 07:05:16.772283 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 05 07:05:16 crc kubenswrapper[4863]: E1205 07:05:16.772547 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-27nk7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-wsxg9_openstack-operators(84668b75-595c-4382-bde5-a7561c200d50): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.919053 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: 
\"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.919162 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.927211 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:16 crc kubenswrapper[4863]: I1205 07:05:16.931251 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4b87a8eb-ebb1-450b-bc8c-cd307695f1c4-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-thxkr\" (UID: \"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:17 crc kubenswrapper[4863]: I1205 07:05:17.023157 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-nvw8w" Dec 05 07:05:17 crc kubenswrapper[4863]: I1205 07:05:17.031770 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:17 crc kubenswrapper[4863]: E1205 07:05:17.408892 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d" Dec 05 07:05:17 crc kubenswrapper[4863]: E1205 07:05:17.409100 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6kdhd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-pr9bb_openstack-operators(5e458dfe-ff58-4c00-b65d-69d634abf798): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:18 crc kubenswrapper[4863]: E1205 07:05:18.023792 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 05 07:05:18 crc kubenswrapper[4863]: E1205 07:05:18.024316 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rt7rs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-rgdk8_openstack-operators(b683a8d2-9503-4660-8a70-d28bc5b9f75b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:18 crc kubenswrapper[4863]: E1205 07:05:18.727990 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 05 07:05:18 crc kubenswrapper[4863]: E1205 07:05:18.728794 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vjwbh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-pcpd5_openstack-operators(905df848-1d74-4ab5-b9a1-4660b651930b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:20 crc kubenswrapper[4863]: E1205 07:05:20.576811 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 05 07:05:20 crc kubenswrapper[4863]: E1205 07:05:20.577218 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-whct6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-mhtdw_openstack-operators(95da9d4b-d38a-4a40-8e9e-282b0b9da2ef): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:21 crc kubenswrapper[4863]: E1205 07:05:21.093252 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 05 07:05:21 crc kubenswrapper[4863]: E1205 07:05:21.093526 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-28hqc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-gs2j5_openstack-operators(fa841a8c-8a88-4134-977a-059db7a51e45): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:21 crc kubenswrapper[4863]: E1205 07:05:21.511815 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 05 07:05:21 crc kubenswrapper[4863]: E1205 07:05:21.511997 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v54fh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-hwzqx_openstack-operators(dd03c712-5c00-447e-a266-4dfe71e3938a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:22 crc kubenswrapper[4863]: E1205 07:05:22.482815 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 05 07:05:22 crc kubenswrapper[4863]: E1205 07:05:22.482984 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l9z9v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-kk8c5_openstack-operators(b850a070-2bf4-4163-9e18-0315e1f0b250): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:24 crc kubenswrapper[4863]: E1205 07:05:24.165557 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 07:05:24 crc kubenswrapper[4863]: E1205 07:05:24.166137 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n5b9l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-l4gp5_openstack-operators(47454469-2c4d-4cbd-befa-eb137b5d4a1e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:05:25 crc kubenswrapper[4863]: I1205 07:05:25.397320 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-hznfl"] Dec 05 07:05:25 crc kubenswrapper[4863]: I1205 07:05:25.969773 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr"] Dec 05 07:05:26 crc kubenswrapper[4863]: W1205 07:05:26.065792 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b87a8eb_ebb1_450b_bc8c_cd307695f1c4.slice/crio-2444bea42270ea06f481976e5b7799d465df70dae627e9a492c715c603f2c760 WatchSource:0}: Error finding container 2444bea42270ea06f481976e5b7799d465df70dae627e9a492c715c603f2c760: Status 404 returned error can't find the container with id 2444bea42270ea06f481976e5b7799d465df70dae627e9a492c715c603f2c760 Dec 05 07:05:26 crc kubenswrapper[4863]: I1205 07:05:26.470119 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" event={"ID":"ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7","Type":"ContainerStarted","Data":"1ab7f6035858061c5d9d4ab7db0bd258cbc97dffdd10e1aeeedaee3fec0c39ce"} Dec 05 07:05:26 crc kubenswrapper[4863]: I1205 07:05:26.486260 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" event={"ID":"cc74b559-6330-495d-b7a3-9582c1a0f935","Type":"ContainerStarted","Data":"f91814e82e9ec361122f459a9cc9373d863de92866f111417296cea85ad27c73"} Dec 05 07:05:26 crc kubenswrapper[4863]: I1205 07:05:26.495026 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" event={"ID":"9cd0a9a8-0623-45f9-84e4-cdc1a6132909","Type":"ContainerStarted","Data":"92731b1b2a4afa905f2f1b05936bdcfbc403d71eb713bafd22c058058c365d85"} Dec 05 07:05:26 crc kubenswrapper[4863]: I1205 07:05:26.497211 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" event={"ID":"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4","Type":"ContainerStarted","Data":"2444bea42270ea06f481976e5b7799d465df70dae627e9a492c715c603f2c760"} Dec 05 07:05:26 crc kubenswrapper[4863]: I1205 07:05:26.506397 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" 
event={"ID":"bc333aa4-954b-4d2d-8bba-8fec9631cecd","Type":"ContainerStarted","Data":"6273b4aaafc32930bbd13fcf14d7cc3bad99e69541e6478f08144ff8d33fb672"} Dec 05 07:05:26 crc kubenswrapper[4863]: I1205 07:05:26.509035 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" event={"ID":"d7017ca8-d0ed-4245-8786-be169d9dde3a","Type":"ContainerStarted","Data":"f1f0ff65387a487c524c6370fb63433de6f576927f14f63cdad5578ed99c7280"} Dec 05 07:05:27 crc kubenswrapper[4863]: I1205 07:05:27.527095 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" event={"ID":"0c7adcb5-dc00-4705-b753-0c695c580367","Type":"ContainerStarted","Data":"0fdc29af8a33045aa5560c15f52f6a6982edf12378b7c7b3c143135984a2ef7d"} Dec 05 07:05:27 crc kubenswrapper[4863]: I1205 07:05:27.530978 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" event={"ID":"429debdd-9c1f-4af0-905e-899a846b5711","Type":"ContainerStarted","Data":"e55374f02ddd2f5b8523c094fc3d2eda358ef76490a7ae70f2ab0a86c35eb682"} Dec 05 07:05:27 crc kubenswrapper[4863]: I1205 07:05:27.532385 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" event={"ID":"aa38ed5a-a0a4-4a7d-9220-6d093163bb5b","Type":"ContainerStarted","Data":"334ccd75a35d333a139aadc07758e875faa1486ec8d43fd06acfd38f5c2266e2"} Dec 05 07:05:27 crc kubenswrapper[4863]: I1205 07:05:27.534604 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" event={"ID":"4b87a8eb-ebb1-450b-bc8c-cd307695f1c4","Type":"ContainerStarted","Data":"0192d15f64eebd54f84ac36cf5c5f47a3dc0c4c85c04e4132fcb4c1aa6216aff"} Dec 05 07:05:27 crc kubenswrapper[4863]: I1205 07:05:27.534665 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:27 crc kubenswrapper[4863]: I1205 07:05:27.536218 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" event={"ID":"0b841d1d-9d65-41e2-9db3-687320b8d75d","Type":"ContainerStarted","Data":"ec55a6d60dde84be496f851d070d38780239eddef29c4c8109cd8832a7877fb8"} Dec 05 07:05:28 crc kubenswrapper[4863]: E1205 07:05:28.886317 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" podUID="84668b75-595c-4382-bde5-a7561c200d50" Dec 05 07:05:28 crc kubenswrapper[4863]: E1205 07:05:28.944055 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" podUID="90a9e868-2b6b-4f2a-ba48-dd03855c47d9" Dec 05 07:05:28 crc kubenswrapper[4863]: E1205 07:05:28.969501 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" podUID="fa841a8c-8a88-4134-977a-059db7a51e45" Dec 05 07:05:28 crc kubenswrapper[4863]: E1205 07:05:28.972427 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" podUID="5e458dfe-ff58-4c00-b65d-69d634abf798" Dec 05 07:05:28 crc kubenswrapper[4863]: E1205 07:05:28.980252 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" podUID="905df848-1d74-4ab5-b9a1-4660b651930b" Dec 05 07:05:29 crc kubenswrapper[4863]: E1205 07:05:29.117152 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" podUID="dd03c712-5c00-447e-a266-4dfe71e3938a" Dec 05 07:05:29 crc kubenswrapper[4863]: E1205 07:05:29.173711 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" podUID="b850a070-2bf4-4163-9e18-0315e1f0b250" Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.549671 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" event={"ID":"5e458dfe-ff58-4c00-b65d-69d634abf798","Type":"ContainerStarted","Data":"3ecfa4851d2554c2f6687a450d4dea7c16c921d97473461f8719013c083c5646"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.551860 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" event={"ID":"dd03c712-5c00-447e-a266-4dfe71e3938a","Type":"ContainerStarted","Data":"3d23676b1cbe398e8c541dfd9920247278be635c7575388c5e2a84d267286b24"} Dec 05 07:05:29 crc kubenswrapper[4863]: E1205 07:05:29.553537 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" podUID="dd03c712-5c00-447e-a266-4dfe71e3938a" Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.553704 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" event={"ID":"b683a8d2-9503-4660-8a70-d28bc5b9f75b","Type":"ContainerStarted","Data":"373a21fc8ba280289ba1ae1d2305340f633d1ba289f09ce3ce1fda5d639016c6"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.555917 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" event={"ID":"9cd0a9a8-0623-45f9-84e4-cdc1a6132909","Type":"ContainerStarted","Data":"4df8ebbe420237b9e5c6cf11d351ee8d095e3b27743eb92b360ffae6ecda2bf1"} Dec 05 07:05:29 crc 
kubenswrapper[4863]: I1205 07:05:29.562202 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" event={"ID":"84668b75-595c-4382-bde5-a7561c200d50","Type":"ContainerStarted","Data":"53c40f437988d41231a031c6dda746752216a15e2e8fd7809e09018606b0e5c4"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.564334 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" event={"ID":"fa841a8c-8a88-4134-977a-059db7a51e45","Type":"ContainerStarted","Data":"fc1c7f1c43885f2111620697f628cf9a4e6f66506bcc42d034a3149ad50f1bc1"} Dec 05 07:05:29 crc kubenswrapper[4863]: E1205 07:05:29.566078 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" podUID="fa841a8c-8a88-4134-977a-059db7a51e45" Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.570831 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" podStartSLOduration=29.57081591 podStartE2EDuration="29.57081591s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:05:27.564910266 +0000 UTC m=+1155.290907306" watchObservedRunningTime="2025-12-05 07:05:29.57081591 +0000 UTC m=+1157.296812950" Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.573196 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" event={"ID":"b850a070-2bf4-4163-9e18-0315e1f0b250","Type":"ContainerStarted","Data":"5b7aa8b2042ee684bc5f4f34fbc515631e028215053bbb4989727a55ba29411c"} Dec 05 07:05:29 crc kubenswrapper[4863]: E1205 07:05:29.574092 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" podUID="b850a070-2bf4-4163-9e18-0315e1f0b250" Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.575334 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" event={"ID":"905df848-1d74-4ab5-b9a1-4660b651930b","Type":"ContainerStarted","Data":"d597ac800eac85100d9d0e1c7eb3be69200eb65f4e26ae09d91e8190a183f172"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.587879 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" event={"ID":"90a9e868-2b6b-4f2a-ba48-dd03855c47d9","Type":"ContainerStarted","Data":"d73a8dd71165d2f947007e2b2949aa10a86462c550506d9f4acf7a138e691c17"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.604223 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" 
event={"ID":"6cba313f-ee15-470e-8dcb-8251b6bfb52a","Type":"ContainerStarted","Data":"d9cbdcc5972573ef73a838590aa4f1a018a759e0eb33b493128f9f5a632d8e39"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.612419 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" event={"ID":"e17b5c94-e94d-4102-8000-fa7d3c939caf","Type":"ContainerStarted","Data":"da54fbcbcb4f77440c8f00864eef3e2ffe75f1494b68ba3bd3c399a5dd711acb"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.612467 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" event={"ID":"e17b5c94-e94d-4102-8000-fa7d3c939caf","Type":"ContainerStarted","Data":"efd530833fc5803ca9c86a4606f3f1a054473bde80b302c10857bb1025e63c75"} Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.613090 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.674633 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hbld6" podStartSLOduration=4.486800175 podStartE2EDuration="28.674614374s" podCreationTimestamp="2025-12-05 07:05:01 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.44580095 +0000 UTC m=+1130.171797990" lastFinishedPulling="2025-12-05 07:05:26.633615149 +0000 UTC m=+1154.359612189" observedRunningTime="2025-12-05 07:05:29.668575108 +0000 UTC m=+1157.394572148" watchObservedRunningTime="2025-12-05 07:05:29.674614374 +0000 UTC m=+1157.400611414" Dec 05 07:05:29 crc kubenswrapper[4863]: I1205 07:05:29.698785 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" podStartSLOduration=6.659585933 podStartE2EDuration="29.69876965s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.478626076 +0000 UTC m=+1130.204623116" lastFinishedPulling="2025-12-05 07:05:25.517809773 +0000 UTC m=+1153.243806833" observedRunningTime="2025-12-05 07:05:29.693670496 +0000 UTC m=+1157.419667536" watchObservedRunningTime="2025-12-05 07:05:29.69876965 +0000 UTC m=+1157.424766690" Dec 05 07:05:30 crc kubenswrapper[4863]: E1205 07:05:30.620390 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" podUID="fa841a8c-8a88-4134-977a-059db7a51e45" Dec 05 07:05:30 crc kubenswrapper[4863]: E1205 07:05:30.620463 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" podUID="b850a070-2bf4-4163-9e18-0315e1f0b250" Dec 05 07:05:30 crc kubenswrapper[4863]: E1205 07:05:30.620463 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" podUID="dd03c712-5c00-447e-a266-4dfe71e3938a" Dec 05 07:05:30 crc kubenswrapper[4863]: E1205 07:05:30.678249 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" podUID="b683a8d2-9503-4660-8a70-d28bc5b9f75b" Dec 05 07:05:30 crc kubenswrapper[4863]: E1205 07:05:30.704258 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" podUID="47454469-2c4d-4cbd-befa-eb137b5d4a1e" Dec 05 07:05:30 crc kubenswrapper[4863]: E1205 07:05:30.707430 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" podUID="95da9d4b-d38a-4a40-8e9e-282b0b9da2ef" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.626213 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" event={"ID":"0c7adcb5-dc00-4705-b753-0c695c580367","Type":"ContainerStarted","Data":"bdc974a1f47e9f6ea475e931bd99933117f5c41bcc52733a28610216e84ef25a"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.626718 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.628957 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" event={"ID":"ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7","Type":"ContainerStarted","Data":"a7eafa2c2b129881fda5d5f9ae3f20035bca95bfa0d886d6b14a07c780b077b7"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.629063 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.631093 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" event={"ID":"cc74b559-6330-495d-b7a3-9582c1a0f935","Type":"ContainerStarted","Data":"835429535d3545f5909d5584e26f435b4a4cf4a5e174888aeb55e5b46398687c"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.631123 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.632958 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.633498 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" 
event={"ID":"9cd0a9a8-0623-45f9-84e4-cdc1a6132909","Type":"ContainerStarted","Data":"c112fddea7c39a495afa15c034b51c19b28646676b150af2ea11bddd07f3b1e7"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.633581 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.634826 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" event={"ID":"47454469-2c4d-4cbd-befa-eb137b5d4a1e","Type":"ContainerStarted","Data":"1b59173c568eabd1dd910eb364eec7f24db535459c532a0b1cce9cb2196fb052"} Dec 05 07:05:31 crc kubenswrapper[4863]: E1205 07:05:31.637840 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" podUID="47454469-2c4d-4cbd-befa-eb137b5d4a1e" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.640106 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" event={"ID":"905df848-1d74-4ab5-b9a1-4660b651930b","Type":"ContainerStarted","Data":"6e9868183fa66b5096f2a27e277964fc5f63e0aaf19d2b2586a1428207664f52"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.640789 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.642127 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" event={"ID":"0b841d1d-9d65-41e2-9db3-687320b8d75d","Type":"ContainerStarted","Data":"ead01d75a094e800b67dbaecb9ecd3bfed05495c5ab5b3c3417acdfc54426348"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.643819 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.644821 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.645572 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" event={"ID":"84668b75-595c-4382-bde5-a7561c200d50","Type":"ContainerStarted","Data":"27d2f54d5c2b5862d285aa9a0e47212a3f68ebdb022b6e52a89e3ddb5efe1d83"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.646089 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.647191 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" event={"ID":"bc333aa4-954b-4d2d-8bba-8fec9631cecd","Type":"ContainerStarted","Data":"57b2d1ae32544a495770f4ee1c3c688c04b3a3aea2a0cab36184c5071a0b8a6f"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.648525 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.649601 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" event={"ID":"5e458dfe-ff58-4c00-b65d-69d634abf798","Type":"ContainerStarted","Data":"52365f061599997c3e16c2b4b80511e5da59707163ec1c0fb75592f260dc9daf"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.649978 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.650155 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.650961 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" event={"ID":"95da9d4b-d38a-4a40-8e9e-282b0b9da2ef","Type":"ContainerStarted","Data":"d512f72c7837e230b4d7cb6d5487f0a809a8d71a813c04332bad1bccb7508e20"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.653013 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" event={"ID":"aa38ed5a-a0a4-4a7d-9220-6d093163bb5b","Type":"ContainerStarted","Data":"452199d641fe40315b320e2b9b31fe38a7319155b724a8f6ba473db04f47cbca"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.654494 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.655067 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.658234 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" event={"ID":"d7017ca8-d0ed-4245-8786-be169d9dde3a","Type":"ContainerStarted","Data":"b2842eff7d2ed7a071ff93c3851e9f18612bec9b373712d6567d733b812c2442"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.659871 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.660495 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.663457 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" event={"ID":"90a9e868-2b6b-4f2a-ba48-dd03855c47d9","Type":"ContainerStarted","Data":"3a3e3fe4e006525730afdf781448ccce62f748df4a2da635f031b004503479d3"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.663589 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.666069 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" 
event={"ID":"429debdd-9c1f-4af0-905e-899a846b5711","Type":"ContainerStarted","Data":"e8a8aeaf03ebdce2775600d95d3df96ce9a736015ad2f472b6699036d4a9f441"} Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.666270 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.668524 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.673615 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-rqgb7" podStartSLOduration=5.355824704 podStartE2EDuration="31.673596181s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.445116294 +0000 UTC m=+1130.171113334" lastFinishedPulling="2025-12-05 07:05:28.762887771 +0000 UTC m=+1156.488884811" observedRunningTime="2025-12-05 07:05:31.665403562 +0000 UTC m=+1159.391400602" watchObservedRunningTime="2025-12-05 07:05:31.673596181 +0000 UTC m=+1159.399593221" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.743062 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" podStartSLOduration=28.752440258 podStartE2EDuration="31.743044563s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:25.531445273 +0000 UTC m=+1153.257442323" lastFinishedPulling="2025-12-05 07:05:28.522049578 +0000 UTC m=+1156.248046628" observedRunningTime="2025-12-05 07:05:31.740746367 +0000 UTC m=+1159.466743397" watchObservedRunningTime="2025-12-05 07:05:31.743044563 +0000 UTC m=+1159.469041603" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.768184 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" podStartSLOduration=3.277830083 podStartE2EDuration="31.768168162s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.344334013 +0000 UTC m=+1130.070331053" lastFinishedPulling="2025-12-05 07:05:30.834672092 +0000 UTC m=+1158.560669132" observedRunningTime="2025-12-05 07:05:31.765974358 +0000 UTC m=+1159.491971418" watchObservedRunningTime="2025-12-05 07:05:31.768168162 +0000 UTC m=+1159.494165202" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.823352 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-q6t65" podStartSLOduration=4.14145147 podStartE2EDuration="31.823332368s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.29594842 +0000 UTC m=+1129.021945460" lastFinishedPulling="2025-12-05 07:05:28.977829318 +0000 UTC m=+1156.703826358" observedRunningTime="2025-12-05 07:05:31.817260971 +0000 UTC m=+1159.543258011" watchObservedRunningTime="2025-12-05 07:05:31.823332368 +0000 UTC m=+1159.549329418" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.860837 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-vbbf4" podStartSLOduration=5.427814788 podStartE2EDuration="31.860818216s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" 
firstStartedPulling="2025-12-05 07:05:02.178432755 +0000 UTC m=+1129.904429795" lastFinishedPulling="2025-12-05 07:05:28.611436183 +0000 UTC m=+1156.337433223" observedRunningTime="2025-12-05 07:05:31.857879645 +0000 UTC m=+1159.583876685" watchObservedRunningTime="2025-12-05 07:05:31.860818216 +0000 UTC m=+1159.586815256" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.923869 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2dmqx" podStartSLOduration=4.486261722 podStartE2EDuration="31.923830392s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.299153668 +0000 UTC m=+1129.025150708" lastFinishedPulling="2025-12-05 07:05:28.736722338 +0000 UTC m=+1156.462719378" observedRunningTime="2025-12-05 07:05:31.912457797 +0000 UTC m=+1159.638454837" watchObservedRunningTime="2025-12-05 07:05:31.923830392 +0000 UTC m=+1159.649827432" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.959626 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" podStartSLOduration=3.243548103 podStartE2EDuration="31.959602458s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.17325132 +0000 UTC m=+1129.899248360" lastFinishedPulling="2025-12-05 07:05:30.889305675 +0000 UTC m=+1158.615302715" observedRunningTime="2025-12-05 07:05:31.956995746 +0000 UTC m=+1159.682992786" watchObservedRunningTime="2025-12-05 07:05:31.959602458 +0000 UTC m=+1159.685599498" Dec 05 07:05:31 crc kubenswrapper[4863]: I1205 07:05:31.970820 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" podStartSLOduration=2.563206803 podStartE2EDuration="31.970806249s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.538407713 +0000 UTC m=+1129.264404753" lastFinishedPulling="2025-12-05 07:05:30.946007159 +0000 UTC m=+1158.672004199" observedRunningTime="2025-12-05 07:05:31.969605611 +0000 UTC m=+1159.695602651" watchObservedRunningTime="2025-12-05 07:05:31.970806249 +0000 UTC m=+1159.696803289" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.014718 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" podStartSLOduration=4.945987347 podStartE2EDuration="32.014698903s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.699140366 +0000 UTC m=+1129.425137406" lastFinishedPulling="2025-12-05 07:05:28.767851922 +0000 UTC m=+1156.493848962" observedRunningTime="2025-12-05 07:05:32.012054069 +0000 UTC m=+1159.738051109" watchObservedRunningTime="2025-12-05 07:05:32.014698903 +0000 UTC m=+1159.740695943" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.018808 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" podStartSLOduration=3.440216436 podStartE2EDuration="32.018786862s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.174046768 +0000 UTC m=+1129.900043798" lastFinishedPulling="2025-12-05 07:05:30.752617184 +0000 UTC m=+1158.478614224" observedRunningTime="2025-12-05 07:05:31.993341976 +0000 UTC m=+1159.719339026" 
watchObservedRunningTime="2025-12-05 07:05:32.018786862 +0000 UTC m=+1159.744783922" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.033450 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-fm45v" podStartSLOduration=4.993731223 podStartE2EDuration="32.033423086s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.710176833 +0000 UTC m=+1129.436173873" lastFinishedPulling="2025-12-05 07:05:28.749868696 +0000 UTC m=+1156.475865736" observedRunningTime="2025-12-05 07:05:32.032118925 +0000 UTC m=+1159.758115965" watchObservedRunningTime="2025-12-05 07:05:32.033423086 +0000 UTC m=+1159.759420126" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.052693 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" podStartSLOduration=5.027233326 podStartE2EDuration="32.052670693s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.72738879 +0000 UTC m=+1129.453385840" lastFinishedPulling="2025-12-05 07:05:28.752826167 +0000 UTC m=+1156.478823207" observedRunningTime="2025-12-05 07:05:32.047045896 +0000 UTC m=+1159.773042936" watchObservedRunningTime="2025-12-05 07:05:32.052670693 +0000 UTC m=+1159.778667733" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.674858 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" event={"ID":"95da9d4b-d38a-4a40-8e9e-282b0b9da2ef","Type":"ContainerStarted","Data":"b54d8368505848ee726d6c431c2d014fd14a1fcd8682c69e9ce23da4f661aa0e"} Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.675365 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.678740 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-dz59p" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.680051 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:32 crc kubenswrapper[4863]: E1205 07:05:32.680431 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" podUID="47454469-2c4d-4cbd-befa-eb137b5d4a1e" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.688329 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3830a1fc-64ea-4860-9324-9f71dba749f3-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f5gt8sr\" (UID: \"3830a1fc-64ea-4860-9324-9f71dba749f3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:32 
crc kubenswrapper[4863]: I1205 07:05:32.711430 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jk9p4" podStartSLOduration=5.978720539 podStartE2EDuration="32.711408097s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.353356161 +0000 UTC m=+1130.079353201" lastFinishedPulling="2025-12-05 07:05:29.086043718 +0000 UTC m=+1156.812040759" observedRunningTime="2025-12-05 07:05:32.06869867 +0000 UTC m=+1159.794695700" watchObservedRunningTime="2025-12-05 07:05:32.711408097 +0000 UTC m=+1160.437405137" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.750605 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" podStartSLOduration=2.886690819 podStartE2EDuration="32.750584276s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.319215895 +0000 UTC m=+1130.045212935" lastFinishedPulling="2025-12-05 07:05:32.183109352 +0000 UTC m=+1159.909106392" observedRunningTime="2025-12-05 07:05:32.744682824 +0000 UTC m=+1160.470679874" watchObservedRunningTime="2025-12-05 07:05:32.750584276 +0000 UTC m=+1160.476581316" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.916686 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-f9rp7" Dec 05 07:05:32 crc kubenswrapper[4863]: I1205 07:05:32.926391 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:33 crc kubenswrapper[4863]: I1205 07:05:33.409960 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr"] Dec 05 07:05:34 crc kubenswrapper[4863]: I1205 07:05:34.055951 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" event={"ID":"b683a8d2-9503-4660-8a70-d28bc5b9f75b","Type":"ContainerStarted","Data":"816fd45060fec8b8f18ca009a5a2cf5967a50cf8fd731b71466fc4c8e555fffc"} Dec 05 07:05:34 crc kubenswrapper[4863]: I1205 07:05:34.056863 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" Dec 05 07:05:34 crc kubenswrapper[4863]: I1205 07:05:34.058585 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" event={"ID":"3830a1fc-64ea-4860-9324-9f71dba749f3","Type":"ContainerStarted","Data":"290854d8b9b42bd9aa14939ce890a72720ed9810267b565d3be4d046577ab8d3"} Dec 05 07:05:34 crc kubenswrapper[4863]: I1205 07:05:34.065867 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" Dec 05 07:05:34 crc kubenswrapper[4863]: I1205 07:05:34.079889 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" podStartSLOduration=3.483970616 podStartE2EDuration="34.079866523s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.505255261 +0000 UTC m=+1130.231252301" lastFinishedPulling="2025-12-05 
07:05:33.101151168 +0000 UTC m=+1160.827148208" observedRunningTime="2025-12-05 07:05:34.077125637 +0000 UTC m=+1161.803122687" watchObservedRunningTime="2025-12-05 07:05:34.079866523 +0000 UTC m=+1161.805863563" Dec 05 07:05:36 crc kubenswrapper[4863]: I1205 07:05:36.076648 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" event={"ID":"3830a1fc-64ea-4860-9324-9f71dba749f3","Type":"ContainerStarted","Data":"58a28c175bd3f32b06c5be8feb3ab98d94cbbf3db2ae5c025344cad1a63a0fc4"} Dec 05 07:05:36 crc kubenswrapper[4863]: I1205 07:05:36.077234 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" event={"ID":"3830a1fc-64ea-4860-9324-9f71dba749f3","Type":"ContainerStarted","Data":"12b9d3bb4e8399db85d4578614331b6dd456ecd3b1c93bc9286839f7bc739720"} Dec 05 07:05:36 crc kubenswrapper[4863]: I1205 07:05:36.077259 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:36 crc kubenswrapper[4863]: I1205 07:05:36.133759 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" podStartSLOduration=33.980870475 podStartE2EDuration="36.133738429s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:33.428131247 +0000 UTC m=+1161.154128297" lastFinishedPulling="2025-12-05 07:05:35.580999211 +0000 UTC m=+1163.306996251" observedRunningTime="2025-12-05 07:05:36.128422311 +0000 UTC m=+1163.854419361" watchObservedRunningTime="2025-12-05 07:05:36.133738429 +0000 UTC m=+1163.859735469" Dec 05 07:05:36 crc kubenswrapper[4863]: I1205 07:05:36.497558 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-hznfl" Dec 05 07:05:37 crc kubenswrapper[4863]: I1205 07:05:37.039260 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-thxkr" Dec 05 07:05:38 crc kubenswrapper[4863]: I1205 07:05:38.463900 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:05:38 crc kubenswrapper[4863]: I1205 07:05:38.463965 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:05:38 crc kubenswrapper[4863]: I1205 07:05:38.464017 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:05:38 crc kubenswrapper[4863]: I1205 07:05:38.464720 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ab7f58a6592494973161b698d02f116307d958edfd4568a004df574f2c07d6ff"} 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:05:38 crc kubenswrapper[4863]: I1205 07:05:38.464799 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://ab7f58a6592494973161b698d02f116307d958edfd4568a004df574f2c07d6ff" gracePeriod=600 Dec 05 07:05:38 crc kubenswrapper[4863]: E1205 07:05:38.603576 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6dd012a_040f_4504_9866_21443f9165d4.slice/crio-ab7f58a6592494973161b698d02f116307d958edfd4568a004df574f2c07d6ff.scope\": RecentStats: unable to find data in memory cache]" Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.112444 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="ab7f58a6592494973161b698d02f116307d958edfd4568a004df574f2c07d6ff" exitCode=0 Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.112514 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"ab7f58a6592494973161b698d02f116307d958edfd4568a004df574f2c07d6ff"} Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.113053 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"53b1c2ce85bd96c615b503118a891dbcf19dd50f3a5e192bde28113e6752a251"} Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.113079 4863 scope.go:117] "RemoveContainer" containerID="d093d3ecafe6d3105031034151698dce338e7ba21ad94081ceea3ba6f6c0fcd4" Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.543324 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-jvwq8" Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.883609 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wsxg9" Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.919698 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-pcpd5" Dec 05 07:05:40 crc kubenswrapper[4863]: I1205 07:05:40.949900 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-mhtdw" Dec 05 07:05:41 crc kubenswrapper[4863]: I1205 07:05:41.157046 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-pr9bb" Dec 05 07:05:41 crc kubenswrapper[4863]: I1205 07:05:41.220030 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rgdk8" Dec 05 07:05:41 crc kubenswrapper[4863]: I1205 07:05:41.262371 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/test-operator-controller-manager-5854674fcc-2wdhl" Dec 05 07:05:42 crc kubenswrapper[4863]: I1205 07:05:42.934619 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f5gt8sr" Dec 05 07:05:51 crc kubenswrapper[4863]: I1205 07:05:51.879818 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="hostpath-provisioner/csi-hostpathplugin-7nt4n" podUID="bcc3fd1c-ffd9-4a54-a912-e11d8a7aca52" containerName="hostpath-provisioner" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 07:05:56 crc kubenswrapper[4863]: I1205 07:05:56.271571 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" event={"ID":"47454469-2c4d-4cbd-befa-eb137b5d4a1e","Type":"ContainerStarted","Data":"fa0e27109c9343fdcb064987a6cc725b398ea1a9b1b5d0f6c510968723d78069"} Dec 05 07:05:56 crc kubenswrapper[4863]: I1205 07:05:56.279569 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" event={"ID":"dd03c712-5c00-447e-a266-4dfe71e3938a","Type":"ContainerStarted","Data":"7c24694c8d705055b63244d6aa7b8ffee206ae0ff1448e72b6e602bb66d5f093"} Dec 05 07:05:56 crc kubenswrapper[4863]: I1205 07:05:56.280822 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" Dec 05 07:05:56 crc kubenswrapper[4863]: I1205 07:05:56.306878 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" podStartSLOduration=2.703065873 podStartE2EDuration="56.306856368s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.17491093 +0000 UTC m=+1129.900907970" lastFinishedPulling="2025-12-05 07:05:55.778701415 +0000 UTC m=+1183.504698465" observedRunningTime="2025-12-05 07:05:56.304163133 +0000 UTC m=+1184.030160183" watchObservedRunningTime="2025-12-05 07:05:56.306856368 +0000 UTC m=+1184.032853418" Dec 05 07:05:57 crc kubenswrapper[4863]: I1205 07:05:57.293517 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" event={"ID":"fa841a8c-8a88-4134-977a-059db7a51e45","Type":"ContainerStarted","Data":"fa254586ef19a57a8b34008db42eeec8d41af4a85299b43de6bc27a0e1a7061c"} Dec 05 07:05:57 crc kubenswrapper[4863]: I1205 07:05:57.294088 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" Dec 05 07:05:57 crc kubenswrapper[4863]: I1205 07:05:57.295817 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" event={"ID":"b850a070-2bf4-4163-9e18-0315e1f0b250","Type":"ContainerStarted","Data":"07f3dace503fa099023aa131350555a13ea38f714a2eb761861bdbaf1d0601da"} Dec 05 07:05:57 crc kubenswrapper[4863]: I1205 07:05:57.296118 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" Dec 05 07:05:57 crc kubenswrapper[4863]: I1205 07:05:57.321712 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" 
podStartSLOduration=3.104655178 podStartE2EDuration="57.321692507s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.566407581 +0000 UTC m=+1129.292404621" lastFinishedPulling="2025-12-05 07:05:55.78344491 +0000 UTC m=+1183.509441950" observedRunningTime="2025-12-05 07:05:57.318561341 +0000 UTC m=+1185.044558391" watchObservedRunningTime="2025-12-05 07:05:57.321692507 +0000 UTC m=+1185.047689547" Dec 05 07:05:57 crc kubenswrapper[4863]: I1205 07:05:57.340363 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" podStartSLOduration=3.54358842 podStartE2EDuration="57.340341769s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.982884059 +0000 UTC m=+1129.708881099" lastFinishedPulling="2025-12-05 07:05:55.779637408 +0000 UTC m=+1183.505634448" observedRunningTime="2025-12-05 07:05:57.334355283 +0000 UTC m=+1185.060352343" watchObservedRunningTime="2025-12-05 07:05:57.340341769 +0000 UTC m=+1185.066338819" Dec 05 07:05:57 crc kubenswrapper[4863]: I1205 07:05:57.354506 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" podStartSLOduration=3.920242462 podStartE2EDuration="57.354465421s" podCreationTimestamp="2025-12-05 07:05:00 +0000 UTC" firstStartedPulling="2025-12-05 07:05:02.346443264 +0000 UTC m=+1130.072440304" lastFinishedPulling="2025-12-05 07:05:55.780666223 +0000 UTC m=+1183.506663263" observedRunningTime="2025-12-05 07:05:57.348093906 +0000 UTC m=+1185.074090946" watchObservedRunningTime="2025-12-05 07:05:57.354465421 +0000 UTC m=+1185.080462461" Dec 05 07:06:00 crc kubenswrapper[4863]: I1205 07:06:00.764776 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" Dec 05 07:06:01 crc kubenswrapper[4863]: I1205 07:06:01.049154 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" Dec 05 07:06:10 crc kubenswrapper[4863]: I1205 07:06:10.459326 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gs2j5" Dec 05 07:06:10 crc kubenswrapper[4863]: I1205 07:06:10.766590 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-l4gp5" Dec 05 07:06:10 crc kubenswrapper[4863]: I1205 07:06:10.882059 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hwzqx" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.751812 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-s78mk"] Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.754770 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.761454 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.761497 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.761500 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.762210 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xx8jc" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.773862 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-s78mk"] Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.802255 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-567c455747-ltkzt"] Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.804829 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.808039 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.812567 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-567c455747-ltkzt"] Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.863068 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c662e39e-bc2b-403b-87c9-0bcf16d7219c-config\") pod \"dnsmasq-dns-5cd484bb89-s78mk\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.863166 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbt74\" (UniqueName: \"kubernetes.io/projected/c662e39e-bc2b-403b-87c9-0bcf16d7219c-kube-api-access-mbt74\") pod \"dnsmasq-dns-5cd484bb89-s78mk\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.964711 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c662e39e-bc2b-403b-87c9-0bcf16d7219c-config\") pod \"dnsmasq-dns-5cd484bb89-s78mk\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.964785 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-dns-svc\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.964848 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-config\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:24 crc 
kubenswrapper[4863]: I1205 07:06:24.964995 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbt74\" (UniqueName: \"kubernetes.io/projected/c662e39e-bc2b-403b-87c9-0bcf16d7219c-kube-api-access-mbt74\") pod \"dnsmasq-dns-5cd484bb89-s78mk\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.965063 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztv7l\" (UniqueName: \"kubernetes.io/projected/925f02cf-d3bb-41af-8eec-c837654bea38-kube-api-access-ztv7l\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.965966 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c662e39e-bc2b-403b-87c9-0bcf16d7219c-config\") pod \"dnsmasq-dns-5cd484bb89-s78mk\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:24 crc kubenswrapper[4863]: I1205 07:06:24.984437 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbt74\" (UniqueName: \"kubernetes.io/projected/c662e39e-bc2b-403b-87c9-0bcf16d7219c-kube-api-access-mbt74\") pod \"dnsmasq-dns-5cd484bb89-s78mk\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.065963 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-dns-svc\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.066012 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-config\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.066068 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztv7l\" (UniqueName: \"kubernetes.io/projected/925f02cf-d3bb-41af-8eec-c837654bea38-kube-api-access-ztv7l\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.067347 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-dns-svc\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.067354 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-config\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.074152 4863 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.089739 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztv7l\" (UniqueName: \"kubernetes.io/projected/925f02cf-d3bb-41af-8eec-c837654bea38-kube-api-access-ztv7l\") pod \"dnsmasq-dns-567c455747-ltkzt\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.132440 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.528687 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-s78mk"] Dec 05 07:06:25 crc kubenswrapper[4863]: I1205 07:06:25.629779 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-567c455747-ltkzt"] Dec 05 07:06:25 crc kubenswrapper[4863]: W1205 07:06:25.633391 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod925f02cf_d3bb_41af_8eec_c837654bea38.slice/crio-4a8aa72d04097ae23fe318b636d84e13899e4b987360131b740a7e703bf03268 WatchSource:0}: Error finding container 4a8aa72d04097ae23fe318b636d84e13899e4b987360131b740a7e703bf03268: Status 404 returned error can't find the container with id 4a8aa72d04097ae23fe318b636d84e13899e4b987360131b740a7e703bf03268 Dec 05 07:06:26 crc kubenswrapper[4863]: I1205 07:06:26.531759 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" event={"ID":"c662e39e-bc2b-403b-87c9-0bcf16d7219c","Type":"ContainerStarted","Data":"020b4961d7e78bfd45d03b8d5e6226e82181dc4f2cf9f2153bbe8bd9dde5b697"} Dec 05 07:06:26 crc kubenswrapper[4863]: I1205 07:06:26.533917 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-567c455747-ltkzt" event={"ID":"925f02cf-d3bb-41af-8eec-c837654bea38","Type":"ContainerStarted","Data":"4a8aa72d04097ae23fe318b636d84e13899e4b987360131b740a7e703bf03268"} Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.585887 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-567c455747-ltkzt"] Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.612038 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-wzv8t"] Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.613212 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.635656 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-wzv8t"] Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.714948 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-dns-svc\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.715095 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl882\" (UniqueName: \"kubernetes.io/projected/b2c20118-6452-43c7-84e6-544b7892acaf-kube-api-access-sl882\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.715170 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-config\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.825396 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl882\" (UniqueName: \"kubernetes.io/projected/b2c20118-6452-43c7-84e6-544b7892acaf-kube-api-access-sl882\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.825507 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-config\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.825543 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-dns-svc\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.826562 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-dns-svc\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.826646 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-config\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.863115 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl882\" (UniqueName: 
\"kubernetes.io/projected/b2c20118-6452-43c7-84e6-544b7892acaf-kube-api-access-sl882\") pod \"dnsmasq-dns-bc4b48fc9-wzv8t\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.909888 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-s78mk"] Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.935574 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb666b895-hdwp6"] Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.937749 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.945233 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:27 crc kubenswrapper[4863]: I1205 07:06:27.949269 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-hdwp6"] Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.139845 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-config\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.140035 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-dns-svc\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.140144 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrpcn\" (UniqueName: \"kubernetes.io/projected/408a5e03-3d56-4a4b-a657-b585b8322104-kube-api-access-rrpcn\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.241895 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrpcn\" (UniqueName: \"kubernetes.io/projected/408a5e03-3d56-4a4b-a657-b585b8322104-kube-api-access-rrpcn\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.242320 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-config\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.242377 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-dns-svc\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.243374 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-dns-svc\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.244062 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-config\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.261353 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrpcn\" (UniqueName: \"kubernetes.io/projected/408a5e03-3d56-4a4b-a657-b585b8322104-kube-api-access-rrpcn\") pod \"dnsmasq-dns-cb666b895-hdwp6\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.262255 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.329981 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-wzv8t"] Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.561795 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" event={"ID":"b2c20118-6452-43c7-84e6-544b7892acaf","Type":"ContainerStarted","Data":"80b58f6522c377d67df524da5b65d4febb122293622caad222c2d41de564d005"} Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.738145 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-hdwp6"] Dec 05 07:06:28 crc kubenswrapper[4863]: W1205 07:06:28.746177 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod408a5e03_3d56_4a4b_a657_b585b8322104.slice/crio-982a4641ea980628deecae9dc140224eba4aa33744cae6375c4b7f111020620a WatchSource:0}: Error finding container 982a4641ea980628deecae9dc140224eba4aa33744cae6375c4b7f111020620a: Status 404 returned error can't find the container with id 982a4641ea980628deecae9dc140224eba4aa33744cae6375c4b7f111020620a Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.784824 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.791290 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.793722 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.797399 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.797441 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.797597 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.797650 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.797710 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-drwfx" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.797805 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.807237 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.950790 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.950832 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46586650-4568-4f5e-9854-30f6e0291b6b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.950857 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47zl5\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-kube-api-access-47zl5\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951066 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951222 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951333 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951485 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951514 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951589 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951717 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46586650-4568-4f5e-9854-30f6e0291b6b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:28 crc kubenswrapper[4863]: I1205 07:06:28.951789 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.050549 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.052081 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.052813 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.052915 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053006 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053034 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053087 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053169 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46586650-4568-4f5e-9854-30f6e0291b6b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053203 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053226 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053274 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46586650-4568-4f5e-9854-30f6e0291b6b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053296 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47zl5\" (UniqueName: 
\"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-kube-api-access-47zl5\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053365 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.053962 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.055668 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.056317 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.057400 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.057605 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.060632 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.067550 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46586650-4568-4f5e-9854-30f6e0291b6b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.074113 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.074503 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.074616 4863 reflector.go:368] Caches populated for *v1.Secret 
from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.075154 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.075177 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.075264 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.075344 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.075369 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.075607 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-kjdl5" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.084985 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46586650-4568-4f5e-9854-30f6e0291b6b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.088566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.090794 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47zl5\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-kube-api-access-47zl5\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.131986 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155083 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155125 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155150 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c908ae8d-4ec2-4938-819c-0ba2ee26f209-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155279 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155341 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155434 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c908ae8d-4ec2-4938-819c-0ba2ee26f209-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155524 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbqfq\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-kube-api-access-pbqfq\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155557 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155608 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.155637 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc 
kubenswrapper[4863]: I1205 07:06:29.256952 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c908ae8d-4ec2-4938-819c-0ba2ee26f209-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257018 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257052 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257102 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c908ae8d-4ec2-4938-819c-0ba2ee26f209-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257162 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbqfq\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-kube-api-access-pbqfq\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257219 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257250 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257293 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257342 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257399 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257435 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.257738 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.258362 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.260775 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.260870 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.262247 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.262943 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.263639 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c908ae8d-4ec2-4938-819c-0ba2ee26f209-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.263902 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.267460 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.276055 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c908ae8d-4ec2-4938-819c-0ba2ee26f209-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.300414 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.305009 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbqfq\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-kube-api-access-pbqfq\") pod \"rabbitmq-cell1-server-0\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.424711 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.480628 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.610411 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" event={"ID":"408a5e03-3d56-4a4b-a657-b585b8322104","Type":"ContainerStarted","Data":"982a4641ea980628deecae9dc140224eba4aa33744cae6375c4b7f111020620a"} Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.912873 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 07:06:29 crc kubenswrapper[4863]: I1205 07:06:29.990071 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.363203 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.364791 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.366805 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.367003 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-ws7fv" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.367122 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.367240 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.375921 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.389937 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475353 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz2t9\" (UniqueName: \"kubernetes.io/projected/29b5ef50-8884-4ebe-bc29-291301e97e69-kube-api-access-qz2t9\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475396 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-operator-scripts\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475465 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-kolla-config\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475554 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-default\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475807 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475839 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475901 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.475990 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-generated\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577580 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz2t9\" (UniqueName: \"kubernetes.io/projected/29b5ef50-8884-4ebe-bc29-291301e97e69-kube-api-access-qz2t9\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577643 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-operator-scripts\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577690 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-kolla-config\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577709 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-default\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577735 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577758 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577780 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.577814 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-generated\") pod \"openstack-galera-0\" (UID: 
\"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.578437 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-generated\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.579092 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-kolla-config\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.579426 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-default\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.581077 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.581405 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-operator-scripts\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.607907 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.608539 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.611280 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.611945 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz2t9\" (UniqueName: \"kubernetes.io/projected/29b5ef50-8884-4ebe-bc29-291301e97e69-kube-api-access-qz2t9\") pod \"openstack-galera-0\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " pod="openstack/openstack-galera-0" Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.642461 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"46586650-4568-4f5e-9854-30f6e0291b6b","Type":"ContainerStarted","Data":"12c440764f9c634198aff9aab51c829761d52ec3201b3e8fdf93d5674796ab5f"} Dec 05 07:06:30 crc kubenswrapper[4863]: I1205 07:06:30.700648 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.690663 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.697865 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.705604 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.705848 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-x8pzs" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.713260 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.717221 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.722966 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.809779 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxkth\" (UniqueName: \"kubernetes.io/projected/fd957476-007c-4882-8449-96deebe6a63c-kube-api-access-gxkth\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.809852 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fd957476-007c-4882-8449-96deebe6a63c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.809890 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.809957 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.809985 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc 
kubenswrapper[4863]: I1205 07:06:31.810005 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.810033 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.810053 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912503 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912555 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912573 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912602 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912623 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912730 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxkth\" (UniqueName: \"kubernetes.io/projected/fd957476-007c-4882-8449-96deebe6a63c-kube-api-access-gxkth\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912776 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fd957476-007c-4882-8449-96deebe6a63c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.912812 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.913355 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.914033 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.914767 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fd957476-007c-4882-8449-96deebe6a63c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.915052 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.915324 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.918203 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.938132 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxkth\" (UniqueName: \"kubernetes.io/projected/fd957476-007c-4882-8449-96deebe6a63c-kube-api-access-gxkth\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.938613 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:31 crc kubenswrapper[4863]: I1205 07:06:31.955881 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.067455 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.099875 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.100914 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.108768 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.108839 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.109157 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-qvsrp" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.116480 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.217536 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-config-data\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.217595 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.217928 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-kolla-config\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.217966 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.218011 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrnpc\" (UniqueName: \"kubernetes.io/projected/d4703140-cc56-4eb8-b06b-1033916a839f-kube-api-access-nrnpc\") pod \"memcached-0\" (UID: 
\"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.320030 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.320583 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-kolla-config\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.320625 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrnpc\" (UniqueName: \"kubernetes.io/projected/d4703140-cc56-4eb8-b06b-1033916a839f-kube-api-access-nrnpc\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.320686 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-config-data\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.320729 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.321772 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-config-data\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.322150 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-kolla-config\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.324078 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.324461 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.340272 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrnpc\" (UniqueName: \"kubernetes.io/projected/d4703140-cc56-4eb8-b06b-1033916a839f-kube-api-access-nrnpc\") pod \"memcached-0\" (UID: 
\"d4703140-cc56-4eb8-b06b-1033916a839f\") " pod="openstack/memcached-0" Dec 05 07:06:32 crc kubenswrapper[4863]: I1205 07:06:32.431094 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.078485 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.080223 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.082422 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2zsct" Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.094257 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.162670 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss9r4\" (UniqueName: \"kubernetes.io/projected/1b04fc39-eb66-4cee-a14f-8162314e456f-kube-api-access-ss9r4\") pod \"kube-state-metrics-0\" (UID: \"1b04fc39-eb66-4cee-a14f-8162314e456f\") " pod="openstack/kube-state-metrics-0" Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.264125 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss9r4\" (UniqueName: \"kubernetes.io/projected/1b04fc39-eb66-4cee-a14f-8162314e456f-kube-api-access-ss9r4\") pod \"kube-state-metrics-0\" (UID: \"1b04fc39-eb66-4cee-a14f-8162314e456f\") " pod="openstack/kube-state-metrics-0" Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.280517 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss9r4\" (UniqueName: \"kubernetes.io/projected/1b04fc39-eb66-4cee-a14f-8162314e456f-kube-api-access-ss9r4\") pod \"kube-state-metrics-0\" (UID: \"1b04fc39-eb66-4cee-a14f-8162314e456f\") " pod="openstack/kube-state-metrics-0" Dec 05 07:06:34 crc kubenswrapper[4863]: I1205 07:06:34.407429 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.805038 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lvrb5"] Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.806660 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.808782 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.809080 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.810817 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-zr9f7" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.837795 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lvrb5"] Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.862260 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-nsmzq"] Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.864041 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.871003 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-nsmzq"] Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906338 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-combined-ca-bundle\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906388 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run-ovn\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906437 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da88ee86-9914-4396-bb33-d00d24b00c59-scripts\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906484 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-ovn-controller-tls-certs\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906508 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-scripts\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906535 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn4w2\" (UniqueName: \"kubernetes.io/projected/da88ee86-9914-4396-bb33-d00d24b00c59-kube-api-access-cn4w2\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906550 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-log\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906570 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-log-ovn\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906587 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkzm5\" 
(UniqueName: \"kubernetes.io/projected/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-kube-api-access-mkzm5\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906610 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-run\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906637 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-lib\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906654 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-etc-ovs\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:36 crc kubenswrapper[4863]: I1205 07:06:36.906686 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008272 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-run\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008330 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-lib\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008351 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-etc-ovs\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008386 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008416 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-combined-ca-bundle\") pod \"ovn-controller-lvrb5\" (UID: 
\"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008435 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run-ovn\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008486 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da88ee86-9914-4396-bb33-d00d24b00c59-scripts\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008514 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-ovn-controller-tls-certs\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008529 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-scripts\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008552 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn4w2\" (UniqueName: \"kubernetes.io/projected/da88ee86-9914-4396-bb33-d00d24b00c59-kube-api-access-cn4w2\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008567 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-log\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008591 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-log-ovn\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.008607 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkzm5\" (UniqueName: \"kubernetes.io/projected/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-kube-api-access-mkzm5\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.009661 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-lib\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.009763 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-run\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.009784 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.009824 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-etc-ovs\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.009932 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-log-ovn\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.009937 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run-ovn\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.010423 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-log\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.010924 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-scripts\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.011091 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da88ee86-9914-4396-bb33-d00d24b00c59-scripts\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.015203 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-combined-ca-bundle\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.015980 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-ovn-controller-tls-certs\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 
07:06:37.024897 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkzm5\" (UniqueName: \"kubernetes.io/projected/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-kube-api-access-mkzm5\") pod \"ovn-controller-ovs-nsmzq\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.028822 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn4w2\" (UniqueName: \"kubernetes.io/projected/da88ee86-9914-4396-bb33-d00d24b00c59-kube-api-access-cn4w2\") pod \"ovn-controller-lvrb5\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.125109 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lvrb5" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.185226 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:06:37 crc kubenswrapper[4863]: I1205 07:06:37.713922 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c908ae8d-4ec2-4938-819c-0ba2ee26f209","Type":"ContainerStarted","Data":"fdd0eb043ffbf6bf9452631cf74ead66e8377eb3eafd10db25b76df07f32d7be"} Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.887187 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.897065 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.900230 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.900516 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.900693 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-lkm4n" Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.900943 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.901094 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 07:06:38 crc kubenswrapper[4863]: I1205 07:06:38.908425 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040230 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040373 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040409 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-config\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040433 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040452 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040495 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040540 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.040590 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn4jk\" (UniqueName: \"kubernetes.io/projected/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-kube-api-access-dn4jk\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142177 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn4jk\" (UniqueName: \"kubernetes.io/projected/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-kube-api-access-dn4jk\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142247 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142337 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142367 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-config\") 
pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142388 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142406 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142422 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142718 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.142909 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.143157 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.144911 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-config\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.145047 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.147614 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.161141 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.161686 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.167367 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn4jk\" (UniqueName: \"kubernetes.io/projected/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-kube-api-access-dn4jk\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.171259 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:39 crc kubenswrapper[4863]: I1205 07:06:39.215261 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.115275 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.117700 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.130105 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.130282 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-2jb2r" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.130436 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.131460 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.140365 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176170 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176225 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz8j9\" (UniqueName: \"kubernetes.io/projected/cfff4892-c0b7-411f-9921-329db358dcde-kube-api-access-tz8j9\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176252 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176272 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176298 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176321 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176369 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.176389 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfff4892-c0b7-411f-9921-329db358dcde-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.279159 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.279321 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz8j9\" (UniqueName: \"kubernetes.io/projected/cfff4892-c0b7-411f-9921-329db358dcde-kube-api-access-tz8j9\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.279448 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.279561 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: 
\"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.279661 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.279700 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.279720 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.280310 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.280367 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfff4892-c0b7-411f-9921-329db358dcde-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.280840 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfff4892-c0b7-411f-9921-329db358dcde-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.281226 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.281965 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.286735 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.286747 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.294225 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.302118 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz8j9\" (UniqueName: \"kubernetes.io/projected/cfff4892-c0b7-411f-9921-329db358dcde-kube-api-access-tz8j9\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.308237 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:41 crc kubenswrapper[4863]: I1205 07:06:41.453416 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 07:06:42 crc kubenswrapper[4863]: I1205 07:06:42.140001 4863 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-mpxlw container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 07:06:42 crc kubenswrapper[4863]: I1205 07:06:42.140596 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mpxlw" podUID="78143d95-87cc-4aa5-acd1-7ad8674dfbd9" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 07:06:47 crc kubenswrapper[4863]: I1205 07:06:47.915735 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.280239 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792" Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.280440 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rrpcn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-cb666b895-hdwp6_openstack(408a5e03-3d56-4a4b-a657-b585b8322104): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.282022 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" Dec 05 07:06:49 crc kubenswrapper[4863]: W1205 07:06:49.284644 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd957476_007c_4882_8449_96deebe6a63c.slice/crio-9ea807b88a5bd50ab5eda3e4c9aeb167646aff425c65cb4f49602a086a83544f WatchSource:0}: Error finding container 9ea807b88a5bd50ab5eda3e4c9aeb167646aff425c65cb4f49602a086a83544f: Status 404 returned error can't find the container with id 9ea807b88a5bd50ab5eda3e4c9aeb167646aff425c65cb4f49602a086a83544f Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.323593 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792" Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.323820 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug 
--bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mbt74,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5cd484bb89-s78mk_openstack(c662e39e-bc2b-403b-87c9-0bcf16d7219c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.324999 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" podUID="c662e39e-bc2b-403b-87c9-0bcf16d7219c" Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.382163 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792" Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.382604 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ztv7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-567c455747-ltkzt_openstack(925f02cf-d3bb-41af-8eec-c837654bea38): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:06:49 crc kubenswrapper[4863]: E1205 07:06:49.384708 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-567c455747-ltkzt" podUID="925f02cf-d3bb-41af-8eec-c837654bea38" Dec 05 07:06:49 crc kubenswrapper[4863]: I1205 07:06:49.763727 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fd957476-007c-4882-8449-96deebe6a63c","Type":"ContainerStarted","Data":"9ea807b88a5bd50ab5eda3e4c9aeb167646aff425c65cb4f49602a086a83544f"} Dec 05 07:06:49 crc kubenswrapper[4863]: I1205 07:06:49.771858 4863 generic.go:334] "Generic (PLEG): container finished" podID="b2c20118-6452-43c7-84e6-544b7892acaf" containerID="0f4674259baf4f8b292a83c997e5c4ccc4565c8b2862c6ae5097364bd76fe33e" exitCode=0 Dec 05 07:06:49 crc kubenswrapper[4863]: I1205 07:06:49.772861 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" event={"ID":"b2c20118-6452-43c7-84e6-544b7892acaf","Type":"ContainerDied","Data":"0f4674259baf4f8b292a83c997e5c4ccc4565c8b2862c6ae5097364bd76fe33e"} Dec 05 07:06:49 crc kubenswrapper[4863]: I1205 07:06:49.881561 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 07:06:49 crc kubenswrapper[4863]: I1205 07:06:49.921541 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 07:06:49 crc kubenswrapper[4863]: W1205 07:06:49.950665 4863 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4703140_cc56_4eb8_b06b_1033916a839f.slice/crio-a79206c19748560e7c43fd4a9c9d5d776206143ee3e536a4347423d3b0f3d210 WatchSource:0}: Error finding container a79206c19748560e7c43fd4a9c9d5d776206143ee3e536a4347423d3b0f3d210: Status 404 returned error can't find the container with id a79206c19748560e7c43fd4a9c9d5d776206143ee3e536a4347423d3b0f3d210 Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.102432 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lvrb5"] Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.147886 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-nsmzq"] Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.158432 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:06:50 crc kubenswrapper[4863]: W1205 07:06:50.160663 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b04fc39_eb66_4cee_a14f_8162314e456f.slice/crio-bf238ede63bb0896c0265cc9e61669387b0cd4edf20130731b8e7accdf452c2d WatchSource:0}: Error finding container bf238ede63bb0896c0265cc9e61669387b0cd4edf20130731b8e7accdf452c2d: Status 404 returned error can't find the container with id bf238ede63bb0896c0265cc9e61669387b0cd4edf20130731b8e7accdf452c2d Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.422437 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.782260 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-567c455747-ltkzt" event={"ID":"925f02cf-d3bb-41af-8eec-c837654bea38","Type":"ContainerDied","Data":"4a8aa72d04097ae23fe318b636d84e13899e4b987360131b740a7e703bf03268"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.782307 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a8aa72d04097ae23fe318b636d84e13899e4b987360131b740a7e703bf03268" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.784941 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91","Type":"ContainerStarted","Data":"a2a6bf8802b25dc77e1526161ae9751253ebc180e4bb7dc962f60aa72b7997a6"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.786203 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1b04fc39-eb66-4cee-a14f-8162314e456f","Type":"ContainerStarted","Data":"bf238ede63bb0896c0265cc9e61669387b0cd4edf20130731b8e7accdf452c2d"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.787013 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"29b5ef50-8884-4ebe-bc29-291301e97e69","Type":"ContainerStarted","Data":"f066c8d9083a4acb8d75e89ac76a6ce5908b02b33e9ed9fe3a21677fd86c7231"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.787842 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" event={"ID":"c662e39e-bc2b-403b-87c9-0bcf16d7219c","Type":"ContainerDied","Data":"020b4961d7e78bfd45d03b8d5e6226e82181dc4f2cf9f2153bbe8bd9dde5b697"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.787872 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="020b4961d7e78bfd45d03b8d5e6226e82181dc4f2cf9f2153bbe8bd9dde5b697" 
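The entries above and below follow the usual layout for this log: a journald prefix ("Dec 05 07:06:50 crc kubenswrapper[4863]:") followed by a klog header (severity letter I/W/E, MMDD timestamp, PID, source file and line) and a structured message, often carrying a pod="namespace/name" field. As a minimal, illustrative sketch only (not part of the kubelet), the following Go program pulls the severity, timestamp, source location, and pod reference out of lines shaped like these; the program name, regular expressions, and the assumption of one journal entry per line are mine, not anything defined by the source.

// kubeletlog.go: skim a kubelet journal for warning/error entries and count
// entries per pod. Assumes one entry per line in the klog/journald layout
// shown above; purely illustrative.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var (
	// klog header after the journald prefix, e.g. `E1205 07:06:49.282022 4863 pod_workers.go:1301]`
	header = regexp.MustCompile(`\s([IWE])(\d{4} \d{2}:\d{2}:\d{2}\.\d+)\s+\d+\s+(\S+?)\]`)
	// structured pod reference, e.g. `pod="openstack/dnsmasq-dns-cb666b895-hdwp6"`
	podRef = regexp.MustCompile(`pod="([^"]+)"`)
)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // container-spec dumps make very long lines
	perPod := map[string]int{}
	for sc.Scan() {
		line := sc.Text()
		h := header.FindStringSubmatch(line)
		if h == nil {
			continue // not a klog-style entry
		}
		pod := "-"
		if m := podRef.FindStringSubmatch(line); m != nil {
			pod = m[1]
		}
		perPod[pod]++
		if h[1] == "W" || h[1] == "E" {
			fmt.Printf("%s %s %s %s\n", h[1], h[2], h[3], pod)
		}
	}
	fmt.Println("entries per pod:", perPod)
}

Run against a kubelet.log of this shape (for example, `go run kubeletlog.go < kubelet.log`), it would surface the ErrImagePull and "Error syncing pod, skipping" entries seen here for the dnsmasq pods, keyed by pod where the entry carries a pod="…" field; entries without that field (such as the serialized container-spec dumps) simply count under "-".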
Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.788624 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nsmzq" event={"ID":"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3","Type":"ContainerStarted","Data":"87b6dd9bf2b89af8f45d97fe953d13eb7f3d9538772189c699d7da1d9df7d34a"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.789374 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lvrb5" event={"ID":"da88ee86-9914-4396-bb33-d00d24b00c59","Type":"ContainerStarted","Data":"644de7450b3852ae89f8355aff95db944adc56f03822c3d390aa43bdc74200e9"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.792702 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d4703140-cc56-4eb8-b06b-1033916a839f","Type":"ContainerStarted","Data":"a79206c19748560e7c43fd4a9c9d5d776206143ee3e536a4347423d3b0f3d210"} Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.794920 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.819905 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.827238 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-config\") pod \"925f02cf-d3bb-41af-8eec-c837654bea38\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.827388 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-dns-svc\") pod \"925f02cf-d3bb-41af-8eec-c837654bea38\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.827442 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztv7l\" (UniqueName: \"kubernetes.io/projected/925f02cf-d3bb-41af-8eec-c837654bea38-kube-api-access-ztv7l\") pod \"925f02cf-d3bb-41af-8eec-c837654bea38\" (UID: \"925f02cf-d3bb-41af-8eec-c837654bea38\") " Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.827484 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbt74\" (UniqueName: \"kubernetes.io/projected/c662e39e-bc2b-403b-87c9-0bcf16d7219c-kube-api-access-mbt74\") pod \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.827532 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c662e39e-bc2b-403b-87c9-0bcf16d7219c-config\") pod \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\" (UID: \"c662e39e-bc2b-403b-87c9-0bcf16d7219c\") " Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.828277 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "925f02cf-d3bb-41af-8eec-c837654bea38" (UID: "925f02cf-d3bb-41af-8eec-c837654bea38"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.828277 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-config" (OuterVolumeSpecName: "config") pod "925f02cf-d3bb-41af-8eec-c837654bea38" (UID: "925f02cf-d3bb-41af-8eec-c837654bea38"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.828375 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c662e39e-bc2b-403b-87c9-0bcf16d7219c-config" (OuterVolumeSpecName: "config") pod "c662e39e-bc2b-403b-87c9-0bcf16d7219c" (UID: "c662e39e-bc2b-403b-87c9-0bcf16d7219c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.836118 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c662e39e-bc2b-403b-87c9-0bcf16d7219c-kube-api-access-mbt74" (OuterVolumeSpecName: "kube-api-access-mbt74") pod "c662e39e-bc2b-403b-87c9-0bcf16d7219c" (UID: "c662e39e-bc2b-403b-87c9-0bcf16d7219c"). InnerVolumeSpecName "kube-api-access-mbt74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.838295 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f02cf-d3bb-41af-8eec-c837654bea38-kube-api-access-ztv7l" (OuterVolumeSpecName: "kube-api-access-ztv7l") pod "925f02cf-d3bb-41af-8eec-c837654bea38" (UID: "925f02cf-d3bb-41af-8eec-c837654bea38"). InnerVolumeSpecName "kube-api-access-ztv7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:06:50 crc kubenswrapper[4863]: E1205 07:06:50.923310 4863 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 05 07:06:50 crc kubenswrapper[4863]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/b2c20118-6452-43c7-84e6-544b7892acaf/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 07:06:50 crc kubenswrapper[4863]: > podSandboxID="80b58f6522c377d67df524da5b65d4febb122293622caad222c2d41de564d005" Dec 05 07:06:50 crc kubenswrapper[4863]: E1205 07:06:50.923524 4863 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 05 07:06:50 crc kubenswrapper[4863]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sl882,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-bc4b48fc9-wzv8t_openstack(b2c20118-6452-43c7-84e6-544b7892acaf): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/b2c20118-6452-43c7-84e6-544b7892acaf/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 07:06:50 crc kubenswrapper[4863]: > logger="UnhandledError" Dec 05 07:06:50 crc kubenswrapper[4863]: E1205 07:06:50.924734 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/b2c20118-6452-43c7-84e6-544b7892acaf/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.935765 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.935798 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztv7l\" (UniqueName: \"kubernetes.io/projected/925f02cf-d3bb-41af-8eec-c837654bea38-kube-api-access-ztv7l\") on node 
\"crc\" DevicePath \"\"" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.935808 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbt74\" (UniqueName: \"kubernetes.io/projected/c662e39e-bc2b-403b-87c9-0bcf16d7219c-kube-api-access-mbt74\") on node \"crc\" DevicePath \"\"" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.935818 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c662e39e-bc2b-403b-87c9-0bcf16d7219c-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:06:50 crc kubenswrapper[4863]: I1205 07:06:50.935827 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/925f02cf-d3bb-41af-8eec-c837654bea38-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.005165 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 07:06:51 crc kubenswrapper[4863]: W1205 07:06:51.012170 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfff4892_c0b7_411f_9921_329db358dcde.slice/crio-344e6fed287e36d5867f5261bf0564a6b4c473cc2e0036d5e842713f9202a44c WatchSource:0}: Error finding container 344e6fed287e36d5867f5261bf0564a6b4c473cc2e0036d5e842713f9202a44c: Status 404 returned error can't find the container with id 344e6fed287e36d5867f5261bf0564a6b4c473cc2e0036d5e842713f9202a44c Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.816304 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfff4892-c0b7-411f-9921-329db358dcde","Type":"ContainerStarted","Data":"344e6fed287e36d5867f5261bf0564a6b4c473cc2e0036d5e842713f9202a44c"} Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.825772 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c908ae8d-4ec2-4938-819c-0ba2ee26f209","Type":"ContainerStarted","Data":"c98e614813f2253340a26aa424aeadf3ffe62e568ea900c57f212c1299236d9b"} Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.834232 4863 generic.go:334] "Generic (PLEG): container finished" podID="408a5e03-3d56-4a4b-a657-b585b8322104" containerID="539c2c9545833848ca686d557f89d80475938ebc766dadf9c35d2552460e7673" exitCode=0 Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.834338 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" event={"ID":"408a5e03-3d56-4a4b-a657-b585b8322104","Type":"ContainerDied","Data":"539c2c9545833848ca686d557f89d80475938ebc766dadf9c35d2552460e7673"} Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.839040 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-ltkzt" Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.842413 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46586650-4568-4f5e-9854-30f6e0291b6b","Type":"ContainerStarted","Data":"c3c05af524778d23854dd79aaf03a41bacf2d449d8d6e8cc2bbf153ace4c85eb"} Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.842582 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-s78mk" Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.975524 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-567c455747-ltkzt"] Dec 05 07:06:51 crc kubenswrapper[4863]: I1205 07:06:51.982608 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-567c455747-ltkzt"] Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.018857 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-s78mk"] Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.043527 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-s78mk"] Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.616857 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f02cf-d3bb-41af-8eec-c837654bea38" path="/var/lib/kubelet/pods/925f02cf-d3bb-41af-8eec-c837654bea38/volumes" Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.617202 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c662e39e-bc2b-403b-87c9-0bcf16d7219c" path="/var/lib/kubelet/pods/c662e39e-bc2b-403b-87c9-0bcf16d7219c/volumes" Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.849111 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" event={"ID":"b2c20118-6452-43c7-84e6-544b7892acaf","Type":"ContainerStarted","Data":"67b664271f3c16430b1157cb096916835bab111073f0c961a5118d4c48f0fc36"} Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.849924 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.854246 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" event={"ID":"408a5e03-3d56-4a4b-a657-b585b8322104","Type":"ContainerStarted","Data":"48d779418a5437ae69aae5775a1908b1fe75357fb9d9e62c922c63baf5eff2f9"} Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.854730 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.871453 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" podStartSLOduration=4.811375037 podStartE2EDuration="25.871431201s" podCreationTimestamp="2025-12-05 07:06:27 +0000 UTC" firstStartedPulling="2025-12-05 07:06:28.353609499 +0000 UTC m=+1216.079606539" lastFinishedPulling="2025-12-05 07:06:49.413665663 +0000 UTC m=+1237.139662703" observedRunningTime="2025-12-05 07:06:52.86598942 +0000 UTC m=+1240.591986460" watchObservedRunningTime="2025-12-05 07:06:52.871431201 +0000 UTC m=+1240.597428241" Dec 05 07:06:52 crc kubenswrapper[4863]: I1205 07:06:52.885231 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" podStartSLOduration=-9223372010.969559 podStartE2EDuration="25.885217123s" podCreationTimestamp="2025-12-05 07:06:27 +0000 UTC" firstStartedPulling="2025-12-05 07:06:28.749962038 +0000 UTC m=+1216.475959078" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:06:52.883732438 +0000 UTC m=+1240.609729478" watchObservedRunningTime="2025-12-05 07:06:52.885217123 +0000 UTC m=+1240.611214153" Dec 05 07:06:57 crc kubenswrapper[4863]: I1205 07:06:57.948682 4863 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:06:58 crc kubenswrapper[4863]: I1205 07:06:58.264314 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:06:58 crc kubenswrapper[4863]: I1205 07:06:58.319838 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-wzv8t"] Dec 05 07:06:58 crc kubenswrapper[4863]: I1205 07:06:58.905012 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" containerName="dnsmasq-dns" containerID="cri-o://67b664271f3c16430b1157cb096916835bab111073f0c961a5118d4c48f0fc36" gracePeriod=10 Dec 05 07:06:59 crc kubenswrapper[4863]: I1205 07:06:59.915691 4863 generic.go:334] "Generic (PLEG): container finished" podID="b2c20118-6452-43c7-84e6-544b7892acaf" containerID="67b664271f3c16430b1157cb096916835bab111073f0c961a5118d4c48f0fc36" exitCode=0 Dec 05 07:06:59 crc kubenswrapper[4863]: I1205 07:06:59.915763 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" event={"ID":"b2c20118-6452-43c7-84e6-544b7892acaf","Type":"ContainerDied","Data":"67b664271f3c16430b1157cb096916835bab111073f0c961a5118d4c48f0fc36"} Dec 05 07:06:59 crc kubenswrapper[4863]: I1205 07:06:59.916294 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" event={"ID":"b2c20118-6452-43c7-84e6-544b7892acaf","Type":"ContainerDied","Data":"80b58f6522c377d67df524da5b65d4febb122293622caad222c2d41de564d005"} Dec 05 07:06:59 crc kubenswrapper[4863]: I1205 07:06:59.916319 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80b58f6522c377d67df524da5b65d4febb122293622caad222c2d41de564d005" Dec 05 07:06:59 crc kubenswrapper[4863]: I1205 07:06:59.962259 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.097308 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-dns-svc\") pod \"b2c20118-6452-43c7-84e6-544b7892acaf\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.097742 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-config\") pod \"b2c20118-6452-43c7-84e6-544b7892acaf\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.097867 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sl882\" (UniqueName: \"kubernetes.io/projected/b2c20118-6452-43c7-84e6-544b7892acaf-kube-api-access-sl882\") pod \"b2c20118-6452-43c7-84e6-544b7892acaf\" (UID: \"b2c20118-6452-43c7-84e6-544b7892acaf\") " Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.103520 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2c20118-6452-43c7-84e6-544b7892acaf-kube-api-access-sl882" (OuterVolumeSpecName: "kube-api-access-sl882") pod "b2c20118-6452-43c7-84e6-544b7892acaf" (UID: "b2c20118-6452-43c7-84e6-544b7892acaf"). InnerVolumeSpecName "kube-api-access-sl882". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.137678 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b2c20118-6452-43c7-84e6-544b7892acaf" (UID: "b2c20118-6452-43c7-84e6-544b7892acaf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.159605 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-config" (OuterVolumeSpecName: "config") pod "b2c20118-6452-43c7-84e6-544b7892acaf" (UID: "b2c20118-6452-43c7-84e6-544b7892acaf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.200767 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sl882\" (UniqueName: \"kubernetes.io/projected/b2c20118-6452-43c7-84e6-544b7892acaf-kube-api-access-sl882\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.200811 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.200824 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c20118-6452-43c7-84e6-544b7892acaf-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.928282 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"29b5ef50-8884-4ebe-bc29-291301e97e69","Type":"ContainerStarted","Data":"6e3a0222b6cd3c3e6e14d0553d74c6df24539ef40f69b913d38c882123e05175"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.933339 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfff4892-c0b7-411f-9921-329db358dcde","Type":"ContainerStarted","Data":"5859f9a814df63156a5f73e63f537579a8e080d8ff53756995610a17d058be68"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.936408 4863 generic.go:334] "Generic (PLEG): container finished" podID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerID="10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d" exitCode=0 Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.936462 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nsmzq" event={"ID":"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3","Type":"ContainerDied","Data":"10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.944651 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lvrb5" event={"ID":"da88ee86-9914-4396-bb33-d00d24b00c59","Type":"ContainerStarted","Data":"8bb9f0e24beab12e2880dde6ea3b6faf8156efe7d58338a5839bbe812a11bdfe"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.944848 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-lvrb5" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.962774 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"d4703140-cc56-4eb8-b06b-1033916a839f","Type":"ContainerStarted","Data":"d4bdd29e9c370627b90ad1916b3bd8db2227c44957a9ebe1de4ac4bdd4fdd598"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.962930 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.973616 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fd957476-007c-4882-8449-96deebe6a63c","Type":"ContainerStarted","Data":"98cd333e1c0d37adf112b6793afb04bdd8345ff143975ea68727af0c80a0fe6a"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.979482 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91","Type":"ContainerStarted","Data":"0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.979461 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-lvrb5" podStartSLOduration=15.266344502 podStartE2EDuration="24.979441406s" podCreationTimestamp="2025-12-05 07:06:36 +0000 UTC" firstStartedPulling="2025-12-05 07:06:50.106978879 +0000 UTC m=+1237.832975919" lastFinishedPulling="2025-12-05 07:06:59.820075783 +0000 UTC m=+1247.546072823" observedRunningTime="2025-12-05 07:07:00.976545066 +0000 UTC m=+1248.702542126" watchObservedRunningTime="2025-12-05 07:07:00.979441406 +0000 UTC m=+1248.705438446" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.987822 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-wzv8t" Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.988961 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1b04fc39-eb66-4cee-a14f-8162314e456f","Type":"ContainerStarted","Data":"4638cb5d3dfc21e835291a1866346f8cfc7bd2242e8dbe86d9e6e40d7488200e"} Dec 05 07:07:00 crc kubenswrapper[4863]: I1205 07:07:00.989050 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 07:07:01 crc kubenswrapper[4863]: I1205 07:07:01.065756 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-wzv8t"] Dec 05 07:07:01 crc kubenswrapper[4863]: I1205 07:07:01.083117 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-wzv8t"] Dec 05 07:07:01 crc kubenswrapper[4863]: I1205 07:07:01.106875 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=17.333767334 podStartE2EDuration="27.106850842s" podCreationTimestamp="2025-12-05 07:06:34 +0000 UTC" firstStartedPulling="2025-12-05 07:06:50.171103632 +0000 UTC m=+1237.897100682" lastFinishedPulling="2025-12-05 07:06:59.94418715 +0000 UTC m=+1247.670184190" observedRunningTime="2025-12-05 07:07:01.074842062 +0000 UTC m=+1248.800839102" watchObservedRunningTime="2025-12-05 07:07:01.106850842 +0000 UTC m=+1248.832847882" Dec 05 07:07:01 crc kubenswrapper[4863]: I1205 07:07:01.998555 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nsmzq" event={"ID":"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3","Type":"ContainerStarted","Data":"7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8"} Dec 05 07:07:01 crc kubenswrapper[4863]: I1205 07:07:01.999073 4863 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nsmzq" event={"ID":"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3","Type":"ContainerStarted","Data":"4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150"} Dec 05 07:07:02 crc kubenswrapper[4863]: I1205 07:07:02.024749 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=20.884841044 podStartE2EDuration="30.024725472s" podCreationTimestamp="2025-12-05 07:06:32 +0000 UTC" firstStartedPulling="2025-12-05 07:06:49.956529729 +0000 UTC m=+1237.682526769" lastFinishedPulling="2025-12-05 07:06:59.096414157 +0000 UTC m=+1246.822411197" observedRunningTime="2025-12-05 07:07:01.107878106 +0000 UTC m=+1248.833875146" watchObservedRunningTime="2025-12-05 07:07:02.024725472 +0000 UTC m=+1249.750722512" Dec 05 07:07:02 crc kubenswrapper[4863]: I1205 07:07:02.026790 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-nsmzq" podStartSLOduration=16.377369181 podStartE2EDuration="26.026778662s" podCreationTimestamp="2025-12-05 07:06:36 +0000 UTC" firstStartedPulling="2025-12-05 07:06:50.167177008 +0000 UTC m=+1237.893174058" lastFinishedPulling="2025-12-05 07:06:59.816586499 +0000 UTC m=+1247.542583539" observedRunningTime="2025-12-05 07:07:02.018842721 +0000 UTC m=+1249.744839761" watchObservedRunningTime="2025-12-05 07:07:02.026778662 +0000 UTC m=+1249.752775702" Dec 05 07:07:02 crc kubenswrapper[4863]: I1205 07:07:02.186576 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:07:02 crc kubenswrapper[4863]: I1205 07:07:02.186623 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:07:02 crc kubenswrapper[4863]: I1205 07:07:02.613746 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" path="/var/lib/kubelet/pods/b2c20118-6452-43c7-84e6-544b7892acaf/volumes" Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.016316 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91","Type":"ContainerStarted","Data":"65f72860b8f159ae7a8d700f8a8fae701459f9c4e811682a22537f30d3cca929"} Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.018298 4863 generic.go:334] "Generic (PLEG): container finished" podID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerID="6e3a0222b6cd3c3e6e14d0553d74c6df24539ef40f69b913d38c882123e05175" exitCode=0 Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.018389 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"29b5ef50-8884-4ebe-bc29-291301e97e69","Type":"ContainerDied","Data":"6e3a0222b6cd3c3e6e14d0553d74c6df24539ef40f69b913d38c882123e05175"} Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.021632 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfff4892-c0b7-411f-9921-329db358dcde","Type":"ContainerStarted","Data":"03c61e5fe864e52cfdadb9c8d6acf54051414a60811bc92e6c566db8db33dd09"} Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.023455 4863 generic.go:334] "Generic (PLEG): container finished" podID="fd957476-007c-4882-8449-96deebe6a63c" containerID="98cd333e1c0d37adf112b6793afb04bdd8345ff143975ea68727af0c80a0fe6a" exitCode=0 Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.023522 4863 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fd957476-007c-4882-8449-96deebe6a63c","Type":"ContainerDied","Data":"98cd333e1c0d37adf112b6793afb04bdd8345ff143975ea68727af0c80a0fe6a"} Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.041687 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=14.199595935 podStartE2EDuration="27.041666684s" podCreationTimestamp="2025-12-05 07:06:37 +0000 UTC" firstStartedPulling="2025-12-05 07:06:50.593326064 +0000 UTC m=+1238.319323144" lastFinishedPulling="2025-12-05 07:07:03.435396853 +0000 UTC m=+1251.161393893" observedRunningTime="2025-12-05 07:07:04.039754227 +0000 UTC m=+1251.765751267" watchObservedRunningTime="2025-12-05 07:07:04.041666684 +0000 UTC m=+1251.767663754" Dec 05 07:07:04 crc kubenswrapper[4863]: I1205 07:07:04.215996 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 07:07:05 crc kubenswrapper[4863]: I1205 07:07:05.032691 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fd957476-007c-4882-8449-96deebe6a63c","Type":"ContainerStarted","Data":"c82e5e7750a9dba29e1d0474f6d5fc28daeb3e913391aab3c135eb318d4d0076"} Dec 05 07:07:05 crc kubenswrapper[4863]: I1205 07:07:05.035902 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"29b5ef50-8884-4ebe-bc29-291301e97e69","Type":"ContainerStarted","Data":"a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e"} Dec 05 07:07:05 crc kubenswrapper[4863]: I1205 07:07:05.055495 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=12.646830636 podStartE2EDuration="25.055465753s" podCreationTimestamp="2025-12-05 07:06:40 +0000 UTC" firstStartedPulling="2025-12-05 07:06:51.014605293 +0000 UTC m=+1238.740602333" lastFinishedPulling="2025-12-05 07:07:03.42324041 +0000 UTC m=+1251.149237450" observedRunningTime="2025-12-05 07:07:04.10135939 +0000 UTC m=+1251.827356470" watchObservedRunningTime="2025-12-05 07:07:05.055465753 +0000 UTC m=+1252.781462803" Dec 05 07:07:05 crc kubenswrapper[4863]: I1205 07:07:05.056009 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=24.544164718 podStartE2EDuration="35.056005765s" podCreationTimestamp="2025-12-05 07:06:30 +0000 UTC" firstStartedPulling="2025-12-05 07:06:49.304812814 +0000 UTC m=+1237.030809854" lastFinishedPulling="2025-12-05 07:06:59.816653811 +0000 UTC m=+1247.542650901" observedRunningTime="2025-12-05 07:07:05.051246981 +0000 UTC m=+1252.777244021" watchObservedRunningTime="2025-12-05 07:07:05.056005765 +0000 UTC m=+1252.782002805" Dec 05 07:07:05 crc kubenswrapper[4863]: I1205 07:07:05.077613 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.198774734 podStartE2EDuration="36.077595275s" podCreationTimestamp="2025-12-05 07:06:29 +0000 UTC" firstStartedPulling="2025-12-05 07:06:49.940937374 +0000 UTC m=+1237.666934414" lastFinishedPulling="2025-12-05 07:06:59.819757915 +0000 UTC m=+1247.545754955" observedRunningTime="2025-12-05 07:07:05.074450609 +0000 UTC m=+1252.800447659" watchObservedRunningTime="2025-12-05 07:07:05.077595275 +0000 UTC m=+1252.803592315" Dec 05 07:07:05 crc kubenswrapper[4863]: I1205 
07:07:05.454041 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 07:07:05 crc kubenswrapper[4863]: I1205 07:07:05.489936 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.041582 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.074221 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.215732 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.287740 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.330832 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb7d9565c-rb24q"] Dec 05 07:07:06 crc kubenswrapper[4863]: E1205 07:07:06.331272 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" containerName="dnsmasq-dns" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.331295 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" containerName="dnsmasq-dns" Dec 05 07:07:06 crc kubenswrapper[4863]: E1205 07:07:06.331337 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" containerName="init" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.331349 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" containerName="init" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.331616 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2c20118-6452-43c7-84e6-544b7892acaf" containerName="dnsmasq-dns" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.335667 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.338801 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.354592 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb7d9565c-rb24q"] Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.357133 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-config\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.357191 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-ovsdbserver-sb\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.357217 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs74p\" (UniqueName: \"kubernetes.io/projected/bdfaa044-0b18-4519-b12f-89d27330dafb-kube-api-access-hs74p\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.357242 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-dns-svc\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.458189 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-config\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.458541 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-ovsdbserver-sb\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.458650 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs74p\" (UniqueName: \"kubernetes.io/projected/bdfaa044-0b18-4519-b12f-89d27330dafb-kube-api-access-hs74p\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.458743 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-dns-svc\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" 
Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.459650 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-dns-svc\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.459676 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-ovsdbserver-sb\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.460546 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-config\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.487165 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs74p\" (UniqueName: \"kubernetes.io/projected/bdfaa044-0b18-4519-b12f-89d27330dafb-kube-api-access-hs74p\") pod \"dnsmasq-dns-7cb7d9565c-rb24q\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.550001 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-48nzq"] Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.552605 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.557392 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.570896 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-48nzq"] Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.653677 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.662171 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-config\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.662238 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-combined-ca-bundle\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.662265 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovs-rundir\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.662286 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovn-rundir\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.662351 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.662383 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdhkj\" (UniqueName: \"kubernetes.io/projected/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-kube-api-access-wdhkj\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.765276 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-config\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.766165 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-combined-ca-bundle\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.766193 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovs-rundir\") pod 
\"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.766211 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovn-rundir\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.766265 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.766302 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdhkj\" (UniqueName: \"kubernetes.io/projected/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-kube-api-access-wdhkj\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.766095 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-config\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.767302 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovn-rundir\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.767655 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovs-rundir\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.772366 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-combined-ca-bundle\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.796934 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-48nzq\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.801545 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdhkj\" (UniqueName: \"kubernetes.io/projected/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-kube-api-access-wdhkj\") pod \"ovn-controller-metrics-48nzq\" (UID: 
\"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.813567 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb7d9565c-rb24q"] Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.863571 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-db7757ddc-7dh7r"] Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.865808 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.871313 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.879010 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.895441 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-db7757ddc-7dh7r"] Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.969750 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-config\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.969801 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq89w\" (UniqueName: \"kubernetes.io/projected/26285ba8-35fd-492a-8aa9-188a698d2923-kube-api-access-kq89w\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.969879 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-dns-svc\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.969905 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-nb\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:06 crc kubenswrapper[4863]: I1205 07:07:06.969983 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-sb\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.072708 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-nb\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 
07:07:07.073127 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-sb\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.073198 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-config\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.073239 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq89w\" (UniqueName: \"kubernetes.io/projected/26285ba8-35fd-492a-8aa9-188a698d2923-kube-api-access-kq89w\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.073356 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-dns-svc\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.074079 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-nb\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.074227 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-config\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.074462 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-sb\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.074506 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-dns-svc\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.090518 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq89w\" (UniqueName: \"kubernetes.io/projected/26285ba8-35fd-492a-8aa9-188a698d2923-kube-api-access-kq89w\") pod \"dnsmasq-dns-db7757ddc-7dh7r\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.110096 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 07:07:07 crc 
kubenswrapper[4863]: I1205 07:07:07.143428 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb7d9565c-rb24q"] Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.204519 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.314937 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.316829 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.320051 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.320051 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.320212 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-48fvw" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.320528 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.330222 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.372305 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-48nzq"] Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.434478 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.480620 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.480948 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.480982 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.481034 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.481067 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb6dc\" (UniqueName: 
\"kubernetes.io/projected/51b09f18-7196-4b58-b4a9-29671ae5a243-kube-api-access-rb6dc\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.481220 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-scripts\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.481257 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-config\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.582578 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.582632 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.582668 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.582694 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb6dc\" (UniqueName: \"kubernetes.io/projected/51b09f18-7196-4b58-b4a9-29671ae5a243-kube-api-access-rb6dc\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.582726 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-scripts\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.582757 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-config\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.582885 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.583544 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.584003 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-config\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.584733 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-scripts\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.586863 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.586926 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.590161 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.607800 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb6dc\" (UniqueName: \"kubernetes.io/projected/51b09f18-7196-4b58-b4a9-29671ae5a243-kube-api-access-rb6dc\") pod \"ovn-northd-0\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.638638 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.682868 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-db7757ddc-7dh7r"] Dec 05 07:07:07 crc kubenswrapper[4863]: W1205 07:07:07.696460 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26285ba8_35fd_492a_8aa9_188a698d2923.slice/crio-98a20a6924f52b230c3ae7cec1065dfce9cc528ad9f05e1f9687f645ea987537 WatchSource:0}: Error finding container 98a20a6924f52b230c3ae7cec1065dfce9cc528ad9f05e1f9687f645ea987537: Status 404 returned error can't find the container with id 98a20a6924f52b230c3ae7cec1065dfce9cc528ad9f05e1f9687f645ea987537 Dec 05 07:07:07 crc kubenswrapper[4863]: I1205 07:07:07.990335 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 07:07:08 crc kubenswrapper[4863]: I1205 07:07:08.058365 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" event={"ID":"26285ba8-35fd-492a-8aa9-188a698d2923","Type":"ContainerStarted","Data":"98a20a6924f52b230c3ae7cec1065dfce9cc528ad9f05e1f9687f645ea987537"} Dec 05 07:07:08 crc kubenswrapper[4863]: I1205 07:07:08.059495 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-48nzq" event={"ID":"cc4802ae-16f2-4b9e-a153-b48e9c8325b8","Type":"ContainerStarted","Data":"c5b8f74cf44bcb9653dea6310e657b71201908f935875bbacff8ff6cf65865ee"} Dec 05 07:07:08 crc kubenswrapper[4863]: I1205 07:07:08.060439 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"51b09f18-7196-4b58-b4a9-29671ae5a243","Type":"ContainerStarted","Data":"f4ac7c4a0753d5fd636beac6d2322223616c09dccb5b89e6b1bce687a4529b62"} Dec 05 07:07:08 crc kubenswrapper[4863]: I1205 07:07:08.062558 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" event={"ID":"bdfaa044-0b18-4519-b12f-89d27330dafb","Type":"ContainerStarted","Data":"f104bbe82520adf574d2e3a54ec85966c0ee9cfc38efdacc0a66a5ffe139fd2e"} Dec 05 07:07:10 crc kubenswrapper[4863]: I1205 07:07:10.701855 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 07:07:10 crc kubenswrapper[4863]: I1205 07:07:10.702167 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 07:07:12 crc kubenswrapper[4863]: I1205 07:07:12.069364 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 07:07:12 crc kubenswrapper[4863]: I1205 07:07:12.069790 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.098504 4863 generic.go:334] "Generic (PLEG): container finished" podID="bdfaa044-0b18-4519-b12f-89d27330dafb" containerID="176d78c0e8b4c648bc2e16a1391e95fc4f8701036af081b9c78ee33b8198b537" exitCode=0 Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.098591 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" event={"ID":"bdfaa044-0b18-4519-b12f-89d27330dafb","Type":"ContainerDied","Data":"176d78c0e8b4c648bc2e16a1391e95fc4f8701036af081b9c78ee33b8198b537"} Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.101456 4863 generic.go:334] "Generic (PLEG): container finished" 
podID="26285ba8-35fd-492a-8aa9-188a698d2923" containerID="ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f" exitCode=0 Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.101502 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" event={"ID":"26285ba8-35fd-492a-8aa9-188a698d2923","Type":"ContainerDied","Data":"ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f"} Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.108094 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-48nzq" event={"ID":"cc4802ae-16f2-4b9e-a153-b48e9c8325b8","Type":"ContainerStarted","Data":"95a3d02a39520c799ce763e3863b5a722c2d59557d6dc506840feb931175f0ff"} Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.156669 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-48nzq" podStartSLOduration=7.156641373 podStartE2EDuration="7.156641373s" podCreationTimestamp="2025-12-05 07:07:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:07:13.151776685 +0000 UTC m=+1260.877773725" watchObservedRunningTime="2025-12-05 07:07:13.156641373 +0000 UTC m=+1260.882638413" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.509335 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:13 crc kubenswrapper[4863]: E1205 07:07:13.585548 4863 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.106:36618->38.102.83.106:33381: write tcp 38.102.83.106:36618->38.102.83.106:33381: write: broken pipe Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.599830 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-ovsdbserver-sb\") pod \"bdfaa044-0b18-4519-b12f-89d27330dafb\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.600004 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs74p\" (UniqueName: \"kubernetes.io/projected/bdfaa044-0b18-4519-b12f-89d27330dafb-kube-api-access-hs74p\") pod \"bdfaa044-0b18-4519-b12f-89d27330dafb\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.600042 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-dns-svc\") pod \"bdfaa044-0b18-4519-b12f-89d27330dafb\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.600136 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-config\") pod \"bdfaa044-0b18-4519-b12f-89d27330dafb\" (UID: \"bdfaa044-0b18-4519-b12f-89d27330dafb\") " Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.605547 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdfaa044-0b18-4519-b12f-89d27330dafb-kube-api-access-hs74p" (OuterVolumeSpecName: "kube-api-access-hs74p") pod "bdfaa044-0b18-4519-b12f-89d27330dafb" (UID: "bdfaa044-0b18-4519-b12f-89d27330dafb"). 
InnerVolumeSpecName "kube-api-access-hs74p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.621869 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bdfaa044-0b18-4519-b12f-89d27330dafb" (UID: "bdfaa044-0b18-4519-b12f-89d27330dafb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.633645 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bdfaa044-0b18-4519-b12f-89d27330dafb" (UID: "bdfaa044-0b18-4519-b12f-89d27330dafb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.640075 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-config" (OuterVolumeSpecName: "config") pod "bdfaa044-0b18-4519-b12f-89d27330dafb" (UID: "bdfaa044-0b18-4519-b12f-89d27330dafb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.702805 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs74p\" (UniqueName: \"kubernetes.io/projected/bdfaa044-0b18-4519-b12f-89d27330dafb-kube-api-access-hs74p\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.702834 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.702843 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:13 crc kubenswrapper[4863]: I1205 07:07:13.702853 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdfaa044-0b18-4519-b12f-89d27330dafb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.132686 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" event={"ID":"26285ba8-35fd-492a-8aa9-188a698d2923","Type":"ContainerStarted","Data":"497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79"} Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.133981 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.135838 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"51b09f18-7196-4b58-b4a9-29671ae5a243","Type":"ContainerStarted","Data":"52e26abc5b133fb5aeaaf4dccc14824d17b6a85c491f8151e997e9bfef541884"} Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.135992 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"51b09f18-7196-4b58-b4a9-29671ae5a243","Type":"ContainerStarted","Data":"8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35"} Dec 05 07:07:14 crc 
kubenswrapper[4863]: I1205 07:07:14.136132 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.138862 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.138995 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb7d9565c-rb24q" event={"ID":"bdfaa044-0b18-4519-b12f-89d27330dafb","Type":"ContainerDied","Data":"f104bbe82520adf574d2e3a54ec85966c0ee9cfc38efdacc0a66a5ffe139fd2e"} Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.140648 4863 scope.go:117] "RemoveContainer" containerID="176d78c0e8b4c648bc2e16a1391e95fc4f8701036af081b9c78ee33b8198b537" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.164162 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" podStartSLOduration=8.164142079 podStartE2EDuration="8.164142079s" podCreationTimestamp="2025-12-05 07:07:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:07:14.154789295 +0000 UTC m=+1261.880786345" watchObservedRunningTime="2025-12-05 07:07:14.164142079 +0000 UTC m=+1261.890139119" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.187599 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.410218492 podStartE2EDuration="7.187576174s" podCreationTimestamp="2025-12-05 07:07:07 +0000 UTC" firstStartedPulling="2025-12-05 07:07:07.995813368 +0000 UTC m=+1255.721810408" lastFinishedPulling="2025-12-05 07:07:13.77317102 +0000 UTC m=+1261.499168090" observedRunningTime="2025-12-05 07:07:14.183553497 +0000 UTC m=+1261.909550547" watchObservedRunningTime="2025-12-05 07:07:14.187576174 +0000 UTC m=+1261.913573214" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.251028 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb7d9565c-rb24q"] Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.256252 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb7d9565c-rb24q"] Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.453629 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.539933 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-db7757ddc-7dh7r"] Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.612368 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdfaa044-0b18-4519-b12f-89d27330dafb" path="/var/lib/kubelet/pods/bdfaa044-0b18-4519-b12f-89d27330dafb/volumes" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.631543 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-w2qjk"] Dec 05 07:07:14 crc kubenswrapper[4863]: E1205 07:07:14.633253 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdfaa044-0b18-4519-b12f-89d27330dafb" containerName="init" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.633378 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdfaa044-0b18-4519-b12f-89d27330dafb" containerName="init" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.633601 4863 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="bdfaa044-0b18-4519-b12f-89d27330dafb" containerName="init" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.635229 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.660879 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-w2qjk"] Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.723380 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.723436 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-dns-svc\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.723522 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnkqn\" (UniqueName: \"kubernetes.io/projected/6cc353f1-a71d-4983-b48f-81ac6140952b-kube-api-access-nnkqn\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.723559 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-config\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.723582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.824722 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.824767 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-dns-svc\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.824802 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnkqn\" (UniqueName: \"kubernetes.io/projected/6cc353f1-a71d-4983-b48f-81ac6140952b-kube-api-access-nnkqn\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: 
\"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.824832 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-config\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.824847 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.825668 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-dns-svc\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.825772 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.825776 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.825896 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-config\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.858674 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnkqn\" (UniqueName: \"kubernetes.io/projected/6cc353f1-a71d-4983-b48f-81ac6140952b-kube-api-access-nnkqn\") pod \"dnsmasq-dns-59d5fbdd8c-w2qjk\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:14 crc kubenswrapper[4863]: I1205 07:07:14.954329 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.135801 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.224695 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.410168 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-w2qjk"] Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.773145 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.784600 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.784788 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.787091 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.787118 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-nn4fd" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.787159 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.787757 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.839320 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvrvt\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-kube-api-access-xvrvt\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.839428 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.839524 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-lock\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.839649 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-cache\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.839723 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " 
pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941084 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-lock\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941197 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-cache\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941225 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941295 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvrvt\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-kube-api-access-xvrvt\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941348 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: E1205 07:07:15.941509 4863 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 07:07:15 crc kubenswrapper[4863]: E1205 07:07:15.941535 4863 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 07:07:15 crc kubenswrapper[4863]: E1205 07:07:15.941592 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift podName:11854f2b-ca24-48c8-b33b-60558484ea0a nodeName:}" failed. No retries permitted until 2025-12-05 07:07:16.441573487 +0000 UTC m=+1264.167570527 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift") pod "swift-storage-0" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a") : configmap "swift-ring-files" not found Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941622 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-lock\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941707 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.941995 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-cache\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.964657 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.971389 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-knrnt"] Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.971408 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvrvt\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-kube-api-access-xvrvt\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.975606 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.978061 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.981834 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.982226 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.995213 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-dgwg4"] Dec 05 07:07:15 crc kubenswrapper[4863]: I1205 07:07:15.996245 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.005703 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-knrnt"] Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.020241 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-dgwg4"] Dec 05 07:07:16 crc kubenswrapper[4863]: E1205 07:07:16.026718 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-7zrl5 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-knrnt" podUID="8d36d483-9db8-4123-b548-fae00704ad5a" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042373 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-ring-data-devices\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042433 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8d36d483-9db8-4123-b548-fae00704ad5a-etc-swift\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042622 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-scripts\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042660 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bd764eca-5968-479c-9a85-34360cc81ee2-etc-swift\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042703 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-combined-ca-bundle\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042721 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zrl5\" (UniqueName: \"kubernetes.io/projected/8d36d483-9db8-4123-b548-fae00704ad5a-kube-api-access-7zrl5\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042762 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhmrv\" (UniqueName: \"kubernetes.io/projected/bd764eca-5968-479c-9a85-34360cc81ee2-kube-api-access-jhmrv\") pod \"swift-ring-rebalance-dgwg4\" (UID: 
\"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042785 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-combined-ca-bundle\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042835 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-ring-data-devices\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042899 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-dispersionconf\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.042991 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-dispersionconf\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.043016 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-scripts\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.043067 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-swiftconf\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.043249 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-swiftconf\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.057557 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-knrnt"] Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.144815 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-ring-data-devices\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.145640 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-ring-data-devices\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.145750 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8d36d483-9db8-4123-b548-fae00704ad5a-etc-swift\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.145893 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-scripts\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.145926 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bd764eca-5968-479c-9a85-34360cc81ee2-etc-swift\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.145971 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-combined-ca-bundle\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.145995 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zrl5\" (UniqueName: \"kubernetes.io/projected/8d36d483-9db8-4123-b548-fae00704ad5a-kube-api-access-7zrl5\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146041 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhmrv\" (UniqueName: \"kubernetes.io/projected/bd764eca-5968-479c-9a85-34360cc81ee2-kube-api-access-jhmrv\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146078 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-combined-ca-bundle\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146104 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8d36d483-9db8-4123-b548-fae00704ad5a-etc-swift\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146119 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: 
\"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-ring-data-devices\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146182 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-dispersionconf\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146256 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-dispersionconf\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146287 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-scripts\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146319 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bd764eca-5968-479c-9a85-34360cc81ee2-etc-swift\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146350 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-swiftconf\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146507 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-swiftconf\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.146522 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-scripts\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.147234 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-ring-data-devices\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.147737 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-scripts\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " 
pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.149483 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-dispersionconf\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.149639 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-swiftconf\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.150510 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-dispersionconf\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.151920 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-combined-ca-bundle\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.152137 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-combined-ca-bundle\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.153113 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-swiftconf\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.164812 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zrl5\" (UniqueName: \"kubernetes.io/projected/8d36d483-9db8-4123-b548-fae00704ad5a-kube-api-access-7zrl5\") pod \"swift-ring-rebalance-knrnt\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.167113 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhmrv\" (UniqueName: \"kubernetes.io/projected/bd764eca-5968-479c-9a85-34360cc81ee2-kube-api-access-jhmrv\") pod \"swift-ring-rebalance-dgwg4\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.169848 4863 generic.go:334] "Generic (PLEG): container finished" podID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerID="a6b2b2cb296137cef37090d5a05db684b05bb2465c1155b26b4b695d0b2f239b" exitCode=0 Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.169906 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.169898 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" event={"ID":"6cc353f1-a71d-4983-b48f-81ac6140952b","Type":"ContainerDied","Data":"a6b2b2cb296137cef37090d5a05db684b05bb2465c1155b26b4b695d0b2f239b"} Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.169956 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" event={"ID":"6cc353f1-a71d-4983-b48f-81ac6140952b","Type":"ContainerStarted","Data":"89c82783c1732e803c4c457b73ab08a80843040c11fe189ababf774e0897e2c9"} Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.170160 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" podUID="26285ba8-35fd-492a-8aa9-188a698d2923" containerName="dnsmasq-dns" containerID="cri-o://497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79" gracePeriod=10 Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.197963 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.248797 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-ring-data-devices\") pod \"8d36d483-9db8-4123-b548-fae00704ad5a\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249022 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zrl5\" (UniqueName: \"kubernetes.io/projected/8d36d483-9db8-4123-b548-fae00704ad5a-kube-api-access-7zrl5\") pod \"8d36d483-9db8-4123-b548-fae00704ad5a\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249054 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-swiftconf\") pod \"8d36d483-9db8-4123-b548-fae00704ad5a\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249075 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8d36d483-9db8-4123-b548-fae00704ad5a-etc-swift\") pod \"8d36d483-9db8-4123-b548-fae00704ad5a\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249093 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-combined-ca-bundle\") pod \"8d36d483-9db8-4123-b548-fae00704ad5a\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249141 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-scripts\") pod \"8d36d483-9db8-4123-b548-fae00704ad5a\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249183 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: 
\"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-dispersionconf\") pod \"8d36d483-9db8-4123-b548-fae00704ad5a\" (UID: \"8d36d483-9db8-4123-b548-fae00704ad5a\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249598 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "8d36d483-9db8-4123-b548-fae00704ad5a" (UID: "8d36d483-9db8-4123-b548-fae00704ad5a"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.249915 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-scripts" (OuterVolumeSpecName: "scripts") pod "8d36d483-9db8-4123-b548-fae00704ad5a" (UID: "8d36d483-9db8-4123-b548-fae00704ad5a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.250303 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d36d483-9db8-4123-b548-fae00704ad5a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8d36d483-9db8-4123-b548-fae00704ad5a" (UID: "8d36d483-9db8-4123-b548-fae00704ad5a"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.252643 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d36d483-9db8-4123-b548-fae00704ad5a-kube-api-access-7zrl5" (OuterVolumeSpecName: "kube-api-access-7zrl5") pod "8d36d483-9db8-4123-b548-fae00704ad5a" (UID: "8d36d483-9db8-4123-b548-fae00704ad5a"). InnerVolumeSpecName "kube-api-access-7zrl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.253718 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "8d36d483-9db8-4123-b548-fae00704ad5a" (UID: "8d36d483-9db8-4123-b548-fae00704ad5a"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.277613 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "8d36d483-9db8-4123-b548-fae00704ad5a" (UID: "8d36d483-9db8-4123-b548-fae00704ad5a"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.280644 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d36d483-9db8-4123-b548-fae00704ad5a" (UID: "8d36d483-9db8-4123-b548-fae00704ad5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.336369 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.351570 4863 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.351611 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zrl5\" (UniqueName: \"kubernetes.io/projected/8d36d483-9db8-4123-b548-fae00704ad5a-kube-api-access-7zrl5\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.351627 4863 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.351642 4863 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8d36d483-9db8-4123-b548-fae00704ad5a-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.351654 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.351666 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d36d483-9db8-4123-b548-fae00704ad5a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.351676 4863 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8d36d483-9db8-4123-b548-fae00704ad5a-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.453067 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:16 crc kubenswrapper[4863]: E1205 07:07:16.453543 4863 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 07:07:16 crc kubenswrapper[4863]: E1205 07:07:16.453557 4863 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 07:07:16 crc kubenswrapper[4863]: E1205 07:07:16.453596 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift podName:11854f2b-ca24-48c8-b33b-60558484ea0a nodeName:}" failed. No retries permitted until 2025-12-05 07:07:17.453583219 +0000 UTC m=+1265.179580259 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift") pod "swift-storage-0" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a") : configmap "swift-ring-files" not found Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.639945 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.761025 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-nb\") pod \"26285ba8-35fd-492a-8aa9-188a698d2923\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.761162 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-config\") pod \"26285ba8-35fd-492a-8aa9-188a698d2923\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.761193 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-sb\") pod \"26285ba8-35fd-492a-8aa9-188a698d2923\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.761259 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq89w\" (UniqueName: \"kubernetes.io/projected/26285ba8-35fd-492a-8aa9-188a698d2923-kube-api-access-kq89w\") pod \"26285ba8-35fd-492a-8aa9-188a698d2923\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.761330 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-dns-svc\") pod \"26285ba8-35fd-492a-8aa9-188a698d2923\" (UID: \"26285ba8-35fd-492a-8aa9-188a698d2923\") " Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.770241 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26285ba8-35fd-492a-8aa9-188a698d2923-kube-api-access-kq89w" (OuterVolumeSpecName: "kube-api-access-kq89w") pod "26285ba8-35fd-492a-8aa9-188a698d2923" (UID: "26285ba8-35fd-492a-8aa9-188a698d2923"). InnerVolumeSpecName "kube-api-access-kq89w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.803993 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "26285ba8-35fd-492a-8aa9-188a698d2923" (UID: "26285ba8-35fd-492a-8aa9-188a698d2923"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.807060 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "26285ba8-35fd-492a-8aa9-188a698d2923" (UID: "26285ba8-35fd-492a-8aa9-188a698d2923"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.810086 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "26285ba8-35fd-492a-8aa9-188a698d2923" (UID: "26285ba8-35fd-492a-8aa9-188a698d2923"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.812597 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-config" (OuterVolumeSpecName: "config") pod "26285ba8-35fd-492a-8aa9-188a698d2923" (UID: "26285ba8-35fd-492a-8aa9-188a698d2923"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.820345 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-dgwg4"] Dec 05 07:07:16 crc kubenswrapper[4863]: W1205 07:07:16.824280 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd764eca_5968_479c_9a85_34360cc81ee2.slice/crio-783e344345cce8f42ebed73c65f25b256eab26377d5ade835f0b9303dd8ce7ee WatchSource:0}: Error finding container 783e344345cce8f42ebed73c65f25b256eab26377d5ade835f0b9303dd8ce7ee: Status 404 returned error can't find the container with id 783e344345cce8f42ebed73c65f25b256eab26377d5ade835f0b9303dd8ce7ee Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.863266 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.863289 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.863298 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.863307 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26285ba8-35fd-492a-8aa9-188a698d2923-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:16 crc kubenswrapper[4863]: I1205 07:07:16.863316 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq89w\" (UniqueName: \"kubernetes.io/projected/26285ba8-35fd-492a-8aa9-188a698d2923-kube-api-access-kq89w\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.184984 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-dgwg4" event={"ID":"bd764eca-5968-479c-9a85-34360cc81ee2","Type":"ContainerStarted","Data":"783e344345cce8f42ebed73c65f25b256eab26377d5ade835f0b9303dd8ce7ee"} Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.186394 4863 generic.go:334] "Generic (PLEG): container finished" podID="26285ba8-35fd-492a-8aa9-188a698d2923" containerID="497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79" exitCode=0 Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.186442 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" event={"ID":"26285ba8-35fd-492a-8aa9-188a698d2923","Type":"ContainerDied","Data":"497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79"} Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.186457 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.186490 4863 scope.go:117] "RemoveContainer" containerID="497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.186459 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db7757ddc-7dh7r" event={"ID":"26285ba8-35fd-492a-8aa9-188a698d2923","Type":"ContainerDied","Data":"98a20a6924f52b230c3ae7cec1065dfce9cc528ad9f05e1f9687f645ea987537"} Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.189048 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-knrnt" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.189091 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" event={"ID":"6cc353f1-a71d-4983-b48f-81ac6140952b","Type":"ContainerStarted","Data":"95223dcea418612ecb5a3e3dcc7de79b6b1662ef56c11682aa7a38808c146929"} Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.206697 4863 scope.go:117] "RemoveContainer" containerID="ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.221076 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" podStartSLOduration=3.221055231 podStartE2EDuration="3.221055231s" podCreationTimestamp="2025-12-05 07:07:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:07:17.210959247 +0000 UTC m=+1264.936956287" watchObservedRunningTime="2025-12-05 07:07:17.221055231 +0000 UTC m=+1264.947052281" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.229379 4863 scope.go:117] "RemoveContainer" containerID="497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79" Dec 05 07:07:17 crc kubenswrapper[4863]: E1205 07:07:17.229914 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79\": container with ID starting with 497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79 not found: ID does not exist" containerID="497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.229954 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79"} err="failed to get container status \"497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79\": rpc error: code = NotFound desc = could not find container \"497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79\": container with ID starting with 497d1473afa84c7b57e85ec3998f7dee793328f4c9fe79484af9852e377fec79 not found: ID does not exist" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.229981 4863 scope.go:117] "RemoveContainer" containerID="ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f" Dec 05 07:07:17 crc kubenswrapper[4863]: E1205 07:07:17.230341 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f\": container with ID starting with 
ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f not found: ID does not exist" containerID="ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.230366 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f"} err="failed to get container status \"ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f\": rpc error: code = NotFound desc = could not find container \"ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f\": container with ID starting with ce5fc0acc37295d28e0a89e40ef341fd33ca6e84a51c0a85b7c368216829ed1f not found: ID does not exist" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.237155 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-db7757ddc-7dh7r"] Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.243657 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-db7757ddc-7dh7r"] Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.266846 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-knrnt"] Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.276989 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-knrnt"] Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.472434 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:17 crc kubenswrapper[4863]: E1205 07:07:17.472698 4863 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 07:07:17 crc kubenswrapper[4863]: E1205 07:07:17.472717 4863 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 07:07:17 crc kubenswrapper[4863]: E1205 07:07:17.472763 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift podName:11854f2b-ca24-48c8-b33b-60558484ea0a nodeName:}" failed. No retries permitted until 2025-12-05 07:07:19.472750377 +0000 UTC m=+1267.198747417 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift") pod "swift-storage-0" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a") : configmap "swift-ring-files" not found Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.690164 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-ddac-account-create-update-bfr58"] Dec 05 07:07:17 crc kubenswrapper[4863]: E1205 07:07:17.690560 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26285ba8-35fd-492a-8aa9-188a698d2923" containerName="dnsmasq-dns" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.690581 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="26285ba8-35fd-492a-8aa9-188a698d2923" containerName="dnsmasq-dns" Dec 05 07:07:17 crc kubenswrapper[4863]: E1205 07:07:17.690602 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26285ba8-35fd-492a-8aa9-188a698d2923" containerName="init" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.690609 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="26285ba8-35fd-492a-8aa9-188a698d2923" containerName="init" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.690803 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="26285ba8-35fd-492a-8aa9-188a698d2923" containerName="dnsmasq-dns" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.691428 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.693884 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.697751 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ddac-account-create-update-bfr58"] Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.748714 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-q6zh7"] Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.749995 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.768762 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-q6zh7"] Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.782272 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0da7943-3388-478a-bc9d-58c07a9f343d-operator-scripts\") pod \"glance-ddac-account-create-update-bfr58\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.782548 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffk6b\" (UniqueName: \"kubernetes.io/projected/e0da7943-3388-478a-bc9d-58c07a9f343d-kube-api-access-ffk6b\") pod \"glance-ddac-account-create-update-bfr58\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.884123 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffk6b\" (UniqueName: \"kubernetes.io/projected/e0da7943-3388-478a-bc9d-58c07a9f343d-kube-api-access-ffk6b\") pod \"glance-ddac-account-create-update-bfr58\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.884614 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0da7943-3388-478a-bc9d-58c07a9f343d-operator-scripts\") pod \"glance-ddac-account-create-update-bfr58\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.884673 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-operator-scripts\") pod \"glance-db-create-q6zh7\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.884704 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwbwc\" (UniqueName: \"kubernetes.io/projected/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-kube-api-access-zwbwc\") pod \"glance-db-create-q6zh7\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.886187 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0da7943-3388-478a-bc9d-58c07a9f343d-operator-scripts\") pod \"glance-ddac-account-create-update-bfr58\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.913103 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffk6b\" (UniqueName: \"kubernetes.io/projected/e0da7943-3388-478a-bc9d-58c07a9f343d-kube-api-access-ffk6b\") pod \"glance-ddac-account-create-update-bfr58\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " 
pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.986361 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-operator-scripts\") pod \"glance-db-create-q6zh7\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.986405 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwbwc\" (UniqueName: \"kubernetes.io/projected/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-kube-api-access-zwbwc\") pod \"glance-db-create-q6zh7\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:17 crc kubenswrapper[4863]: I1205 07:07:17.987114 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-operator-scripts\") pod \"glance-db-create-q6zh7\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.008219 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwbwc\" (UniqueName: \"kubernetes.io/projected/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-kube-api-access-zwbwc\") pod \"glance-db-create-q6zh7\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.010937 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.070661 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.206182 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.217684 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.328238 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.525747 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ddac-account-create-update-bfr58"] Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.624081 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26285ba8-35fd-492a-8aa9-188a698d2923" path="/var/lib/kubelet/pods/26285ba8-35fd-492a-8aa9-188a698d2923/volumes" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.624764 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d36d483-9db8-4123-b548-fae00704ad5a" path="/var/lib/kubelet/pods/8d36d483-9db8-4123-b548-fae00704ad5a/volumes" Dec 05 07:07:18 crc kubenswrapper[4863]: I1205 07:07:18.626634 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-q6zh7"] Dec 05 07:07:19 crc kubenswrapper[4863]: I1205 07:07:19.511629 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:19 crc kubenswrapper[4863]: E1205 07:07:19.512141 4863 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 07:07:19 crc kubenswrapper[4863]: E1205 07:07:19.512157 4863 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 07:07:19 crc kubenswrapper[4863]: E1205 07:07:19.512198 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift podName:11854f2b-ca24-48c8-b33b-60558484ea0a nodeName:}" failed. No retries permitted until 2025-12-05 07:07:23.51218392 +0000 UTC m=+1271.238180960 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift") pod "swift-storage-0" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a") : configmap "swift-ring-files" not found Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.249116 4863 generic.go:334] "Generic (PLEG): container finished" podID="e0da7943-3388-478a-bc9d-58c07a9f343d" containerID="f1827d0de3c80fccbdf17a0694bc33df087fbbc30925674d4a91699101340d23" exitCode=0 Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.249213 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ddac-account-create-update-bfr58" event={"ID":"e0da7943-3388-478a-bc9d-58c07a9f343d","Type":"ContainerDied","Data":"f1827d0de3c80fccbdf17a0694bc33df087fbbc30925674d4a91699101340d23"} Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.249527 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ddac-account-create-update-bfr58" event={"ID":"e0da7943-3388-478a-bc9d-58c07a9f343d","Type":"ContainerStarted","Data":"e738890f59e0bb515b8e65c7dd293c12a74b2263643e55eb7098b6bb30b17ba8"} Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.259465 4863 generic.go:334] "Generic (PLEG): container finished" podID="8bd48690-e87b-4b7d-9ba3-46e5d33c1e72" containerID="b0392e056cac39b351dc847a4b040f35dd6fedf1fc8850d8c1afc5e276ae80dd" exitCode=0 Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.259586 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-q6zh7" event={"ID":"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72","Type":"ContainerDied","Data":"b0392e056cac39b351dc847a4b040f35dd6fedf1fc8850d8c1afc5e276ae80dd"} Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.259977 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-q6zh7" event={"ID":"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72","Type":"ContainerStarted","Data":"7c6792d83eeeef60d59c95e52fff9a4254b0a267c97b76c065dc9768e4aae966"} Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.265825 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-dgwg4" event={"ID":"bd764eca-5968-479c-9a85-34360cc81ee2","Type":"ContainerStarted","Data":"61c0a380e2dcf0cd0908947582893330d846cc7902c15cbf37c53788715964ad"} Dec 05 07:07:21 crc kubenswrapper[4863]: I1205 07:07:21.303233 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-dgwg4" podStartSLOduration=2.718635886 podStartE2EDuration="6.303213005s" podCreationTimestamp="2025-12-05 07:07:15 +0000 UTC" firstStartedPulling="2025-12-05 07:07:16.826464364 +0000 UTC m=+1264.552461404" lastFinishedPulling="2025-12-05 07:07:20.411041483 +0000 UTC m=+1268.137038523" observedRunningTime="2025-12-05 07:07:21.291252587 +0000 UTC m=+1269.017249637" watchObservedRunningTime="2025-12-05 07:07:21.303213005 +0000 UTC m=+1269.029210045" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.020692 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-t6xdp"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.021775 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.035443 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-t6xdp"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.067856 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjp7t\" (UniqueName: \"kubernetes.io/projected/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-kube-api-access-tjp7t\") pod \"keystone-db-create-t6xdp\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.067927 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-operator-scripts\") pod \"keystone-db-create-t6xdp\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.124192 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-50e2-account-create-update-vbhxj"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.125634 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.129750 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.136245 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-50e2-account-create-update-vbhxj"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.170642 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjp7t\" (UniqueName: \"kubernetes.io/projected/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-kube-api-access-tjp7t\") pod \"keystone-db-create-t6xdp\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.170715 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c5388c0-d809-4a83-a70d-ac33db4f5123-operator-scripts\") pod \"keystone-50e2-account-create-update-vbhxj\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.170740 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-operator-scripts\") pod \"keystone-db-create-t6xdp\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.170802 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r9vt\" (UniqueName: \"kubernetes.io/projected/5c5388c0-d809-4a83-a70d-ac33db4f5123-kube-api-access-8r9vt\") pod \"keystone-50e2-account-create-update-vbhxj\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.171561 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-operator-scripts\") pod \"keystone-db-create-t6xdp\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.197507 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjp7t\" (UniqueName: \"kubernetes.io/projected/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-kube-api-access-tjp7t\") pod \"keystone-db-create-t6xdp\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.273143 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c5388c0-d809-4a83-a70d-ac33db4f5123-operator-scripts\") pod \"keystone-50e2-account-create-update-vbhxj\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.274250 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c5388c0-d809-4a83-a70d-ac33db4f5123-operator-scripts\") pod \"keystone-50e2-account-create-update-vbhxj\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.274321 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r9vt\" (UniqueName: \"kubernetes.io/projected/5c5388c0-d809-4a83-a70d-ac33db4f5123-kube-api-access-8r9vt\") pod \"keystone-50e2-account-create-update-vbhxj\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.317902 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r9vt\" (UniqueName: \"kubernetes.io/projected/5c5388c0-d809-4a83-a70d-ac33db4f5123-kube-api-access-8r9vt\") pod \"keystone-50e2-account-create-update-vbhxj\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.328267 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-spckz"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.329741 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.339425 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-spckz"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.346094 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.375771 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4057f14-25fe-40a6-a802-05472d2aad87-operator-scripts\") pod \"placement-db-create-spckz\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.376107 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlgx4\" (UniqueName: \"kubernetes.io/projected/a4057f14-25fe-40a6-a802-05472d2aad87-kube-api-access-vlgx4\") pod \"placement-db-create-spckz\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.430012 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-1771-account-create-update-wpz6n"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.440785 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1771-account-create-update-wpz6n"] Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.440876 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.443372 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.451473 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.478971 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt9pm\" (UniqueName: \"kubernetes.io/projected/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-kube-api-access-tt9pm\") pod \"placement-1771-account-create-update-wpz6n\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.479028 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4057f14-25fe-40a6-a802-05472d2aad87-operator-scripts\") pod \"placement-db-create-spckz\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.479075 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlgx4\" (UniqueName: \"kubernetes.io/projected/a4057f14-25fe-40a6-a802-05472d2aad87-kube-api-access-vlgx4\") pod \"placement-db-create-spckz\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.479147 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-operator-scripts\") pod \"placement-1771-account-create-update-wpz6n\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.479924 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4057f14-25fe-40a6-a802-05472d2aad87-operator-scripts\") pod \"placement-db-create-spckz\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.498203 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlgx4\" (UniqueName: \"kubernetes.io/projected/a4057f14-25fe-40a6-a802-05472d2aad87-kube-api-access-vlgx4\") pod \"placement-db-create-spckz\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.580163 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt9pm\" (UniqueName: \"kubernetes.io/projected/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-kube-api-access-tt9pm\") pod \"placement-1771-account-create-update-wpz6n\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.580251 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-operator-scripts\") pod \"placement-1771-account-create-update-wpz6n\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.581315 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-operator-scripts\") pod \"placement-1771-account-create-update-wpz6n\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.597968 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt9pm\" (UniqueName: \"kubernetes.io/projected/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-kube-api-access-tt9pm\") pod \"placement-1771-account-create-update-wpz6n\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.656453 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.663859 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-spckz" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.725927 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.764117 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.783158 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-operator-scripts\") pod \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.783258 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffk6b\" (UniqueName: \"kubernetes.io/projected/e0da7943-3388-478a-bc9d-58c07a9f343d-kube-api-access-ffk6b\") pod \"e0da7943-3388-478a-bc9d-58c07a9f343d\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.783406 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0da7943-3388-478a-bc9d-58c07a9f343d-operator-scripts\") pod \"e0da7943-3388-478a-bc9d-58c07a9f343d\" (UID: \"e0da7943-3388-478a-bc9d-58c07a9f343d\") " Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.783466 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwbwc\" (UniqueName: \"kubernetes.io/projected/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-kube-api-access-zwbwc\") pod \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\" (UID: \"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72\") " Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.784692 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0da7943-3388-478a-bc9d-58c07a9f343d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0da7943-3388-478a-bc9d-58c07a9f343d" (UID: "e0da7943-3388-478a-bc9d-58c07a9f343d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.785123 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8bd48690-e87b-4b7d-9ba3-46e5d33c1e72" (UID: "8bd48690-e87b-4b7d-9ba3-46e5d33c1e72"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.787265 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0da7943-3388-478a-bc9d-58c07a9f343d-kube-api-access-ffk6b" (OuterVolumeSpecName: "kube-api-access-ffk6b") pod "e0da7943-3388-478a-bc9d-58c07a9f343d" (UID: "e0da7943-3388-478a-bc9d-58c07a9f343d"). InnerVolumeSpecName "kube-api-access-ffk6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.794666 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-kube-api-access-zwbwc" (OuterVolumeSpecName: "kube-api-access-zwbwc") pod "8bd48690-e87b-4b7d-9ba3-46e5d33c1e72" (UID: "8bd48690-e87b-4b7d-9ba3-46e5d33c1e72"). InnerVolumeSpecName "kube-api-access-zwbwc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.854746 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-t6xdp"] Dec 05 07:07:22 crc kubenswrapper[4863]: W1205 07:07:22.871923 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ef1f2f1_5905_4a67_9a5a_9d42f9dce122.slice/crio-e01693c46647534ead2c1a6ba80615e9902b16357b545dbec9935208e54e3723 WatchSource:0}: Error finding container e01693c46647534ead2c1a6ba80615e9902b16357b545dbec9935208e54e3723: Status 404 returned error can't find the container with id e01693c46647534ead2c1a6ba80615e9902b16357b545dbec9935208e54e3723 Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.886898 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwbwc\" (UniqueName: \"kubernetes.io/projected/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-kube-api-access-zwbwc\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.886923 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.886933 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffk6b\" (UniqueName: \"kubernetes.io/projected/e0da7943-3388-478a-bc9d-58c07a9f343d-kube-api-access-ffk6b\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:22 crc kubenswrapper[4863]: I1205 07:07:22.886943 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0da7943-3388-478a-bc9d-58c07a9f343d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.011629 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-50e2-account-create-update-vbhxj"] Dec 05 07:07:23 crc kubenswrapper[4863]: W1205 07:07:23.014250 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c5388c0_d809_4a83_a70d_ac33db4f5123.slice/crio-c682a838f7ff0e3b8d3d5e08fc730d571d4df7d0966b523a8b381ae3d46f51b6 WatchSource:0}: Error finding container c682a838f7ff0e3b8d3d5e08fc730d571d4df7d0966b523a8b381ae3d46f51b6: Status 404 returned error can't find the container with id c682a838f7ff0e3b8d3d5e08fc730d571d4df7d0966b523a8b381ae3d46f51b6 Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.184850 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-spckz"] Dec 05 07:07:23 crc kubenswrapper[4863]: W1205 07:07:23.198365 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4057f14_25fe_40a6_a802_05472d2aad87.slice/crio-46f404b9b54172933262513da3b5483e704b90c20a2be71e2e6b7a7ac212d7ea WatchSource:0}: Error finding container 46f404b9b54172933262513da3b5483e704b90c20a2be71e2e6b7a7ac212d7ea: Status 404 returned error can't find the container with id 46f404b9b54172933262513da3b5483e704b90c20a2be71e2e6b7a7ac212d7ea Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.259792 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1771-account-create-update-wpz6n"] Dec 05 07:07:23 crc kubenswrapper[4863]: W1205 07:07:23.279200 4863 manager.go:1169] Failed 
to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bd6c593_c2d5_4e25_98f6_5fa0a96af771.slice/crio-eef62db142a6565c395a3c4e4040da53d5fdddf5fc5f81f254d821e3f28257d9 WatchSource:0}: Error finding container eef62db142a6565c395a3c4e4040da53d5fdddf5fc5f81f254d821e3f28257d9: Status 404 returned error can't find the container with id eef62db142a6565c395a3c4e4040da53d5fdddf5fc5f81f254d821e3f28257d9 Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.288300 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ddac-account-create-update-bfr58" event={"ID":"e0da7943-3388-478a-bc9d-58c07a9f343d","Type":"ContainerDied","Data":"e738890f59e0bb515b8e65c7dd293c12a74b2263643e55eb7098b6bb30b17ba8"} Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.288347 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e738890f59e0bb515b8e65c7dd293c12a74b2263643e55eb7098b6bb30b17ba8" Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.288430 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ddac-account-create-update-bfr58" Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.298739 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-50e2-account-create-update-vbhxj" event={"ID":"5c5388c0-d809-4a83-a70d-ac33db4f5123","Type":"ContainerStarted","Data":"c682a838f7ff0e3b8d3d5e08fc730d571d4df7d0966b523a8b381ae3d46f51b6"} Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.332295 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-50e2-account-create-update-vbhxj" podStartSLOduration=1.332269118 podStartE2EDuration="1.332269118s" podCreationTimestamp="2025-12-05 07:07:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:07:23.320263109 +0000 UTC m=+1271.046260149" watchObservedRunningTime="2025-12-05 07:07:23.332269118 +0000 UTC m=+1271.058266198" Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.334553 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-q6zh7" Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.334845 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-q6zh7" event={"ID":"8bd48690-e87b-4b7d-9ba3-46e5d33c1e72","Type":"ContainerDied","Data":"7c6792d83eeeef60d59c95e52fff9a4254b0a267c97b76c065dc9768e4aae966"} Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.335393 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c6792d83eeeef60d59c95e52fff9a4254b0a267c97b76c065dc9768e4aae966" Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.338233 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t6xdp" event={"ID":"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122","Type":"ContainerStarted","Data":"1c1a98b157d189ecbf275f559664f14b9b9c6e413f9b98bcc9cd673dbf5bdb22"} Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.338327 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t6xdp" event={"ID":"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122","Type":"ContainerStarted","Data":"e01693c46647534ead2c1a6ba80615e9902b16357b545dbec9935208e54e3723"} Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.342613 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-spckz" event={"ID":"a4057f14-25fe-40a6-a802-05472d2aad87","Type":"ContainerStarted","Data":"46f404b9b54172933262513da3b5483e704b90c20a2be71e2e6b7a7ac212d7ea"} Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.353242 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-t6xdp" podStartSLOduration=1.353227822 podStartE2EDuration="1.353227822s" podCreationTimestamp="2025-12-05 07:07:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:07:23.350183419 +0000 UTC m=+1271.076180459" watchObservedRunningTime="2025-12-05 07:07:23.353227822 +0000 UTC m=+1271.079224862" Dec 05 07:07:23 crc kubenswrapper[4863]: I1205 07:07:23.604197 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:23 crc kubenswrapper[4863]: E1205 07:07:23.604369 4863 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 07:07:23 crc kubenswrapper[4863]: E1205 07:07:23.604387 4863 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 07:07:23 crc kubenswrapper[4863]: E1205 07:07:23.604434 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift podName:11854f2b-ca24-48c8-b33b-60558484ea0a nodeName:}" failed. No retries permitted until 2025-12-05 07:07:31.604419618 +0000 UTC m=+1279.330416658 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift") pod "swift-storage-0" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a") : configmap "swift-ring-files" not found Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.351287 4863 generic.go:334] "Generic (PLEG): container finished" podID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerID="c98e614813f2253340a26aa424aeadf3ffe62e568ea900c57f212c1299236d9b" exitCode=0 Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.351353 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c908ae8d-4ec2-4938-819c-0ba2ee26f209","Type":"ContainerDied","Data":"c98e614813f2253340a26aa424aeadf3ffe62e568ea900c57f212c1299236d9b"} Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.360870 4863 generic.go:334] "Generic (PLEG): container finished" podID="0ef1f2f1-5905-4a67-9a5a-9d42f9dce122" containerID="1c1a98b157d189ecbf275f559664f14b9b9c6e413f9b98bcc9cd673dbf5bdb22" exitCode=0 Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.360928 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t6xdp" event={"ID":"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122","Type":"ContainerDied","Data":"1c1a98b157d189ecbf275f559664f14b9b9c6e413f9b98bcc9cd673dbf5bdb22"} Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.365154 4863 generic.go:334] "Generic (PLEG): container finished" podID="a4057f14-25fe-40a6-a802-05472d2aad87" containerID="e1649004a00070ec01f90918299cc6fa83e3d560abfe1e90693aa20d4579e4d5" exitCode=0 Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.365220 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-spckz" event={"ID":"a4057f14-25fe-40a6-a802-05472d2aad87","Type":"ContainerDied","Data":"e1649004a00070ec01f90918299cc6fa83e3d560abfe1e90693aa20d4579e4d5"} Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.386228 4863 generic.go:334] "Generic (PLEG): container finished" podID="7bd6c593-c2d5-4e25-98f6-5fa0a96af771" containerID="a1fc455ac74128a3780203bb3d350804c33addd1bc64663641bd68d9213de7ae" exitCode=0 Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.386308 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1771-account-create-update-wpz6n" event={"ID":"7bd6c593-c2d5-4e25-98f6-5fa0a96af771","Type":"ContainerDied","Data":"a1fc455ac74128a3780203bb3d350804c33addd1bc64663641bd68d9213de7ae"} Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.386377 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1771-account-create-update-wpz6n" event={"ID":"7bd6c593-c2d5-4e25-98f6-5fa0a96af771","Type":"ContainerStarted","Data":"eef62db142a6565c395a3c4e4040da53d5fdddf5fc5f81f254d821e3f28257d9"} Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.389081 4863 generic.go:334] "Generic (PLEG): container finished" podID="46586650-4568-4f5e-9854-30f6e0291b6b" containerID="c3c05af524778d23854dd79aaf03a41bacf2d449d8d6e8cc2bbf153ace4c85eb" exitCode=0 Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.389123 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46586650-4568-4f5e-9854-30f6e0291b6b","Type":"ContainerDied","Data":"c3c05af524778d23854dd79aaf03a41bacf2d449d8d6e8cc2bbf153ace4c85eb"} Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.395285 4863 generic.go:334] "Generic (PLEG): container finished" 
podID="5c5388c0-d809-4a83-a70d-ac33db4f5123" containerID="76e2426791e1914af8754c91af32546638412eadaf31f056ae68063c6665734a" exitCode=0 Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.395332 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-50e2-account-create-update-vbhxj" event={"ID":"5c5388c0-d809-4a83-a70d-ac33db4f5123","Type":"ContainerDied","Data":"76e2426791e1914af8754c91af32546638412eadaf31f056ae68063c6665734a"} Dec 05 07:07:24 crc kubenswrapper[4863]: I1205 07:07:24.956751 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.020810 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-hdwp6"] Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.021108 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" containerName="dnsmasq-dns" containerID="cri-o://48d779418a5437ae69aae5775a1908b1fe75357fb9d9e62c922c63baf5eff2f9" gracePeriod=10 Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.404433 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46586650-4568-4f5e-9854-30f6e0291b6b","Type":"ContainerStarted","Data":"7d926cefd2b8f2782d82e3be6c2ab3724e1e4e1d45592a80014e9eb9c83211c5"} Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.404694 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.407268 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c908ae8d-4ec2-4938-819c-0ba2ee26f209","Type":"ContainerStarted","Data":"60b413aacbb400385f318f7b88f361cca235df215614238ffe8edc91b3a7e32f"} Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.407462 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.410580 4863 generic.go:334] "Generic (PLEG): container finished" podID="408a5e03-3d56-4a4b-a657-b585b8322104" containerID="48d779418a5437ae69aae5775a1908b1fe75357fb9d9e62c922c63baf5eff2f9" exitCode=0 Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.410758 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" event={"ID":"408a5e03-3d56-4a4b-a657-b585b8322104","Type":"ContainerDied","Data":"48d779418a5437ae69aae5775a1908b1fe75357fb9d9e62c922c63baf5eff2f9"} Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.466105 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.992632373 podStartE2EDuration="58.466082832s" podCreationTimestamp="2025-12-05 07:06:27 +0000 UTC" firstStartedPulling="2025-12-05 07:06:29.925191643 +0000 UTC m=+1217.651188683" lastFinishedPulling="2025-12-05 07:06:49.398642102 +0000 UTC m=+1237.124639142" observedRunningTime="2025-12-05 07:07:25.433609271 +0000 UTC m=+1273.159606321" watchObservedRunningTime="2025-12-05 07:07:25.466082832 +0000 UTC m=+1273.192079872" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.471255 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=45.352845872 podStartE2EDuration="57.471236566s" 
podCreationTimestamp="2025-12-05 07:06:28 +0000 UTC" firstStartedPulling="2025-12-05 07:06:37.288856436 +0000 UTC m=+1225.014853476" lastFinishedPulling="2025-12-05 07:06:49.40724713 +0000 UTC m=+1237.133244170" observedRunningTime="2025-12-05 07:07:25.466445941 +0000 UTC m=+1273.192442981" watchObservedRunningTime="2025-12-05 07:07:25.471236566 +0000 UTC m=+1273.197233606" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.518516 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.643063 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrpcn\" (UniqueName: \"kubernetes.io/projected/408a5e03-3d56-4a4b-a657-b585b8322104-kube-api-access-rrpcn\") pod \"408a5e03-3d56-4a4b-a657-b585b8322104\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.643247 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-dns-svc\") pod \"408a5e03-3d56-4a4b-a657-b585b8322104\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.643271 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-config\") pod \"408a5e03-3d56-4a4b-a657-b585b8322104\" (UID: \"408a5e03-3d56-4a4b-a657-b585b8322104\") " Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.652434 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/408a5e03-3d56-4a4b-a657-b585b8322104-kube-api-access-rrpcn" (OuterVolumeSpecName: "kube-api-access-rrpcn") pod "408a5e03-3d56-4a4b-a657-b585b8322104" (UID: "408a5e03-3d56-4a4b-a657-b585b8322104"). InnerVolumeSpecName "kube-api-access-rrpcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.691186 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "408a5e03-3d56-4a4b-a657-b585b8322104" (UID: "408a5e03-3d56-4a4b-a657-b585b8322104"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.712691 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-config" (OuterVolumeSpecName: "config") pod "408a5e03-3d56-4a4b-a657-b585b8322104" (UID: "408a5e03-3d56-4a4b-a657-b585b8322104"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.745508 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrpcn\" (UniqueName: \"kubernetes.io/projected/408a5e03-3d56-4a4b-a657-b585b8322104-kube-api-access-rrpcn\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.745538 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.745570 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/408a5e03-3d56-4a4b-a657-b585b8322104-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.770457 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-spckz" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.947915 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4057f14-25fe-40a6-a802-05472d2aad87-operator-scripts\") pod \"a4057f14-25fe-40a6-a802-05472d2aad87\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.947992 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlgx4\" (UniqueName: \"kubernetes.io/projected/a4057f14-25fe-40a6-a802-05472d2aad87-kube-api-access-vlgx4\") pod \"a4057f14-25fe-40a6-a802-05472d2aad87\" (UID: \"a4057f14-25fe-40a6-a802-05472d2aad87\") " Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.948433 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4057f14-25fe-40a6-a802-05472d2aad87-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a4057f14-25fe-40a6-a802-05472d2aad87" (UID: "a4057f14-25fe-40a6-a802-05472d2aad87"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.950915 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4057f14-25fe-40a6-a802-05472d2aad87-kube-api-access-vlgx4" (OuterVolumeSpecName: "kube-api-access-vlgx4") pod "a4057f14-25fe-40a6-a802-05472d2aad87" (UID: "a4057f14-25fe-40a6-a802-05472d2aad87"). InnerVolumeSpecName "kube-api-access-vlgx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.974689 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.979316 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:25 crc kubenswrapper[4863]: I1205 07:07:25.996857 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.049631 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4057f14-25fe-40a6-a802-05472d2aad87-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.049665 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlgx4\" (UniqueName: \"kubernetes.io/projected/a4057f14-25fe-40a6-a802-05472d2aad87-kube-api-access-vlgx4\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.150250 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r9vt\" (UniqueName: \"kubernetes.io/projected/5c5388c0-d809-4a83-a70d-ac33db4f5123-kube-api-access-8r9vt\") pod \"5c5388c0-d809-4a83-a70d-ac33db4f5123\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.150292 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-operator-scripts\") pod \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.150321 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c5388c0-d809-4a83-a70d-ac33db4f5123-operator-scripts\") pod \"5c5388c0-d809-4a83-a70d-ac33db4f5123\" (UID: \"5c5388c0-d809-4a83-a70d-ac33db4f5123\") " Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.150452 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt9pm\" (UniqueName: \"kubernetes.io/projected/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-kube-api-access-tt9pm\") pod \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.150502 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjp7t\" (UniqueName: \"kubernetes.io/projected/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-kube-api-access-tjp7t\") pod \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\" (UID: \"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122\") " Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.150535 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-operator-scripts\") pod \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\" (UID: \"7bd6c593-c2d5-4e25-98f6-5fa0a96af771\") " Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.151124 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c5388c0-d809-4a83-a70d-ac33db4f5123-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5c5388c0-d809-4a83-a70d-ac33db4f5123" (UID: "5c5388c0-d809-4a83-a70d-ac33db4f5123"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.151160 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0ef1f2f1-5905-4a67-9a5a-9d42f9dce122" (UID: "0ef1f2f1-5905-4a67-9a5a-9d42f9dce122"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.151186 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7bd6c593-c2d5-4e25-98f6-5fa0a96af771" (UID: "7bd6c593-c2d5-4e25-98f6-5fa0a96af771"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.151684 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.151716 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.151730 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c5388c0-d809-4a83-a70d-ac33db4f5123-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.153558 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c5388c0-d809-4a83-a70d-ac33db4f5123-kube-api-access-8r9vt" (OuterVolumeSpecName: "kube-api-access-8r9vt") pod "5c5388c0-d809-4a83-a70d-ac33db4f5123" (UID: "5c5388c0-d809-4a83-a70d-ac33db4f5123"). InnerVolumeSpecName "kube-api-access-8r9vt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.154084 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-kube-api-access-tjp7t" (OuterVolumeSpecName: "kube-api-access-tjp7t") pod "0ef1f2f1-5905-4a67-9a5a-9d42f9dce122" (UID: "0ef1f2f1-5905-4a67-9a5a-9d42f9dce122"). InnerVolumeSpecName "kube-api-access-tjp7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.154514 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-kube-api-access-tt9pm" (OuterVolumeSpecName: "kube-api-access-tt9pm") pod "7bd6c593-c2d5-4e25-98f6-5fa0a96af771" (UID: "7bd6c593-c2d5-4e25-98f6-5fa0a96af771"). InnerVolumeSpecName "kube-api-access-tt9pm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.253252 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt9pm\" (UniqueName: \"kubernetes.io/projected/7bd6c593-c2d5-4e25-98f6-5fa0a96af771-kube-api-access-tt9pm\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.253287 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjp7t\" (UniqueName: \"kubernetes.io/projected/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122-kube-api-access-tjp7t\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.253296 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r9vt\" (UniqueName: \"kubernetes.io/projected/5c5388c0-d809-4a83-a70d-ac33db4f5123-kube-api-access-8r9vt\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.420810 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-50e2-account-create-update-vbhxj" event={"ID":"5c5388c0-d809-4a83-a70d-ac33db4f5123","Type":"ContainerDied","Data":"c682a838f7ff0e3b8d3d5e08fc730d571d4df7d0966b523a8b381ae3d46f51b6"} Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.420847 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c682a838f7ff0e3b8d3d5e08fc730d571d4df7d0966b523a8b381ae3d46f51b6" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.421886 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t6xdp" event={"ID":"0ef1f2f1-5905-4a67-9a5a-9d42f9dce122","Type":"ContainerDied","Data":"e01693c46647534ead2c1a6ba80615e9902b16357b545dbec9935208e54e3723"} Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.421908 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e01693c46647534ead2c1a6ba80615e9902b16357b545dbec9935208e54e3723" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.421960 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-t6xdp" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.422103 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-50e2-account-create-update-vbhxj" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.437059 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-spckz" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.437605 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-spckz" event={"ID":"a4057f14-25fe-40a6-a802-05472d2aad87","Type":"ContainerDied","Data":"46f404b9b54172933262513da3b5483e704b90c20a2be71e2e6b7a7ac212d7ea"} Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.437641 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46f404b9b54172933262513da3b5483e704b90c20a2be71e2e6b7a7ac212d7ea" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.439581 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" event={"ID":"408a5e03-3d56-4a4b-a657-b585b8322104","Type":"ContainerDied","Data":"982a4641ea980628deecae9dc140224eba4aa33744cae6375c4b7f111020620a"} Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.439599 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-hdwp6" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.439791 4863 scope.go:117] "RemoveContainer" containerID="48d779418a5437ae69aae5775a1908b1fe75357fb9d9e62c922c63baf5eff2f9" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.442522 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1771-account-create-update-wpz6n" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.442408 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1771-account-create-update-wpz6n" event={"ID":"7bd6c593-c2d5-4e25-98f6-5fa0a96af771","Type":"ContainerDied","Data":"eef62db142a6565c395a3c4e4040da53d5fdddf5fc5f81f254d821e3f28257d9"} Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.443456 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eef62db142a6565c395a3c4e4040da53d5fdddf5fc5f81f254d821e3f28257d9" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.495617 4863 scope.go:117] "RemoveContainer" containerID="539c2c9545833848ca686d557f89d80475938ebc766dadf9c35d2552460e7673" Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.553987 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-hdwp6"] Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.564759 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-hdwp6"] Dec 05 07:07:26 crc kubenswrapper[4863]: I1205 07:07:26.612227 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" path="/var/lib/kubelet/pods/408a5e03-3d56-4a4b-a657-b585b8322104/volumes" Dec 05 07:07:27 crc kubenswrapper[4863]: I1205 07:07:27.712561 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.060647 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-4xpt9"] Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061063 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" containerName="dnsmasq-dns" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061087 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" containerName="dnsmasq-dns" Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061117 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ef1f2f1-5905-4a67-9a5a-9d42f9dce122" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061125 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ef1f2f1-5905-4a67-9a5a-9d42f9dce122" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061139 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0da7943-3388-478a-bc9d-58c07a9f343d" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061148 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0da7943-3388-478a-bc9d-58c07a9f343d" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061165 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4057f14-25fe-40a6-a802-05472d2aad87" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: 
I1205 07:07:28.061174 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4057f14-25fe-40a6-a802-05472d2aad87" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061187 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd48690-e87b-4b7d-9ba3-46e5d33c1e72" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061195 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd48690-e87b-4b7d-9ba3-46e5d33c1e72" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061210 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" containerName="init" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061218 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" containerName="init" Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061235 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5388c0-d809-4a83-a70d-ac33db4f5123" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061243 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5388c0-d809-4a83-a70d-ac33db4f5123" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: E1205 07:07:28.061256 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bd6c593-c2d5-4e25-98f6-5fa0a96af771" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061264 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bd6c593-c2d5-4e25-98f6-5fa0a96af771" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061460 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0da7943-3388-478a-bc9d-58c07a9f343d" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061576 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4057f14-25fe-40a6-a802-05472d2aad87" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061599 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bd6c593-c2d5-4e25-98f6-5fa0a96af771" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061616 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ef1f2f1-5905-4a67-9a5a-9d42f9dce122" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061631 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="408a5e03-3d56-4a4b-a657-b585b8322104" containerName="dnsmasq-dns" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061651 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bd48690-e87b-4b7d-9ba3-46e5d33c1e72" containerName="mariadb-database-create" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.061671 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c5388c0-d809-4a83-a70d-ac33db4f5123" containerName="mariadb-account-create-update" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.062320 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.067975 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.068494 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6j46f" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.069623 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-4xpt9"] Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.081509 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-config-data\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.081582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-combined-ca-bundle\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.082067 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-db-sync-config-data\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.082133 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgv5t\" (UniqueName: \"kubernetes.io/projected/a8b5f081-158a-4d6c-a16f-c1b90548ee63-kube-api-access-lgv5t\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.183553 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-db-sync-config-data\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.183608 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgv5t\" (UniqueName: \"kubernetes.io/projected/a8b5f081-158a-4d6c-a16f-c1b90548ee63-kube-api-access-lgv5t\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.183671 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-config-data\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.183706 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-combined-ca-bundle\") pod 
\"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.189379 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-combined-ca-bundle\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.189407 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-db-sync-config-data\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.202891 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgv5t\" (UniqueName: \"kubernetes.io/projected/a8b5f081-158a-4d6c-a16f-c1b90548ee63-kube-api-access-lgv5t\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.204737 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-config-data\") pod \"glance-db-sync-4xpt9\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.384595 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-4xpt9" Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.461845 4863 generic.go:334] "Generic (PLEG): container finished" podID="bd764eca-5968-479c-9a85-34360cc81ee2" containerID="61c0a380e2dcf0cd0908947582893330d846cc7902c15cbf37c53788715964ad" exitCode=0 Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.461883 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-dgwg4" event={"ID":"bd764eca-5968-479c-9a85-34360cc81ee2","Type":"ContainerDied","Data":"61c0a380e2dcf0cd0908947582893330d846cc7902c15cbf37c53788715964ad"} Dec 05 07:07:28 crc kubenswrapper[4863]: I1205 07:07:28.963802 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-4xpt9"] Dec 05 07:07:28 crc kubenswrapper[4863]: W1205 07:07:28.976766 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8b5f081_158a_4d6c_a16f_c1b90548ee63.slice/crio-46e066e29ca0d896af2755a661a4be31462526c6afcf4b8c6a60e112f8dba5fd WatchSource:0}: Error finding container 46e066e29ca0d896af2755a661a4be31462526c6afcf4b8c6a60e112f8dba5fd: Status 404 returned error can't find the container with id 46e066e29ca0d896af2755a661a4be31462526c6afcf4b8c6a60e112f8dba5fd Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.471766 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4xpt9" event={"ID":"a8b5f081-158a-4d6c-a16f-c1b90548ee63","Type":"ContainerStarted","Data":"46e066e29ca0d896af2755a661a4be31462526c6afcf4b8c6a60e112f8dba5fd"} Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.781343 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.809337 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-combined-ca-bundle\") pod \"bd764eca-5968-479c-9a85-34360cc81ee2\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.809504 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-dispersionconf\") pod \"bd764eca-5968-479c-9a85-34360cc81ee2\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.809539 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhmrv\" (UniqueName: \"kubernetes.io/projected/bd764eca-5968-479c-9a85-34360cc81ee2-kube-api-access-jhmrv\") pod \"bd764eca-5968-479c-9a85-34360cc81ee2\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.809597 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-ring-data-devices\") pod \"bd764eca-5968-479c-9a85-34360cc81ee2\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.809648 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-scripts\") pod \"bd764eca-5968-479c-9a85-34360cc81ee2\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.810083 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bd764eca-5968-479c-9a85-34360cc81ee2-etc-swift\") pod \"bd764eca-5968-479c-9a85-34360cc81ee2\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.810119 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-swiftconf\") pod \"bd764eca-5968-479c-9a85-34360cc81ee2\" (UID: \"bd764eca-5968-479c-9a85-34360cc81ee2\") " Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.810659 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "bd764eca-5968-479c-9a85-34360cc81ee2" (UID: "bd764eca-5968-479c-9a85-34360cc81ee2"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.810776 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd764eca-5968-479c-9a85-34360cc81ee2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "bd764eca-5968-479c-9a85-34360cc81ee2" (UID: "bd764eca-5968-479c-9a85-34360cc81ee2"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.832354 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd764eca-5968-479c-9a85-34360cc81ee2-kube-api-access-jhmrv" (OuterVolumeSpecName: "kube-api-access-jhmrv") pod "bd764eca-5968-479c-9a85-34360cc81ee2" (UID: "bd764eca-5968-479c-9a85-34360cc81ee2"). InnerVolumeSpecName "kube-api-access-jhmrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.836776 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd764eca-5968-479c-9a85-34360cc81ee2" (UID: "bd764eca-5968-479c-9a85-34360cc81ee2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.836810 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "bd764eca-5968-479c-9a85-34360cc81ee2" (UID: "bd764eca-5968-479c-9a85-34360cc81ee2"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.839612 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "bd764eca-5968-479c-9a85-34360cc81ee2" (UID: "bd764eca-5968-479c-9a85-34360cc81ee2"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.850189 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-scripts" (OuterVolumeSpecName: "scripts") pod "bd764eca-5968-479c-9a85-34360cc81ee2" (UID: "bd764eca-5968-479c-9a85-34360cc81ee2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.912408 4863 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.912570 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhmrv\" (UniqueName: \"kubernetes.io/projected/bd764eca-5968-479c-9a85-34360cc81ee2-kube-api-access-jhmrv\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.912620 4863 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.912633 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd764eca-5968-479c-9a85-34360cc81ee2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.912678 4863 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bd764eca-5968-479c-9a85-34360cc81ee2-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.912687 4863 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:29 crc kubenswrapper[4863]: I1205 07:07:29.912729 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd764eca-5968-479c-9a85-34360cc81ee2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:30 crc kubenswrapper[4863]: I1205 07:07:30.479581 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-dgwg4" event={"ID":"bd764eca-5968-479c-9a85-34360cc81ee2","Type":"ContainerDied","Data":"783e344345cce8f42ebed73c65f25b256eab26377d5ade835f0b9303dd8ce7ee"} Dec 05 07:07:30 crc kubenswrapper[4863]: I1205 07:07:30.479637 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="783e344345cce8f42ebed73c65f25b256eab26377d5ade835f0b9303dd8ce7ee" Dec 05 07:07:30 crc kubenswrapper[4863]: I1205 07:07:30.479690 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-dgwg4" Dec 05 07:07:31 crc kubenswrapper[4863]: I1205 07:07:31.640029 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:31 crc kubenswrapper[4863]: I1205 07:07:31.654867 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"swift-storage-0\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " pod="openstack/swift-storage-0" Dec 05 07:07:31 crc kubenswrapper[4863]: I1205 07:07:31.713240 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.189173 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-lvrb5" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" containerName="ovn-controller" probeResult="failure" output=< Dec 05 07:07:32 crc kubenswrapper[4863]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 07:07:32 crc kubenswrapper[4863]: > Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.231637 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.247100 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.316924 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.496313 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lvrb5-config-bd2q2"] Dec 05 07:07:32 crc kubenswrapper[4863]: E1205 07:07:32.496850 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd764eca-5968-479c-9a85-34360cc81ee2" containerName="swift-ring-rebalance" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.496878 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd764eca-5968-479c-9a85-34360cc81ee2" containerName="swift-ring-rebalance" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.497105 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd764eca-5968-479c-9a85-34360cc81ee2" containerName="swift-ring-rebalance" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.497985 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.503945 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.510862 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lvrb5-config-bd2q2"] Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.517853 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"cf85fe32cab18eb5e4b91c42dac04e58c9cbb1ecd3510138556917bad77673ea"} Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.568329 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-scripts\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.568501 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.568546 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-log-ovn\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.568594 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-additional-scripts\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.568690 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55z7j\" (UniqueName: \"kubernetes.io/projected/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-kube-api-access-55z7j\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.568801 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run-ovn\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670344 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-scripts\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " 
pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670401 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670419 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-log-ovn\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670441 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-additional-scripts\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670569 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55z7j\" (UniqueName: \"kubernetes.io/projected/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-kube-api-access-55z7j\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670617 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run-ovn\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670730 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run-ovn\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670739 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-log-ovn\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.670828 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.671392 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-additional-scripts\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " 
pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.673155 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-scripts\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.711316 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55z7j\" (UniqueName: \"kubernetes.io/projected/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-kube-api-access-55z7j\") pod \"ovn-controller-lvrb5-config-bd2q2\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:32 crc kubenswrapper[4863]: I1205 07:07:32.828394 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:33 crc kubenswrapper[4863]: I1205 07:07:33.370172 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lvrb5-config-bd2q2"] Dec 05 07:07:33 crc kubenswrapper[4863]: W1205 07:07:33.374355 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85b606ab_8e66_4ede_9e34_3f0e43eb88ac.slice/crio-6cdceec9fd12c18985479e6a545a0954827ff85276c0958a40d4306e976a1577 WatchSource:0}: Error finding container 6cdceec9fd12c18985479e6a545a0954827ff85276c0958a40d4306e976a1577: Status 404 returned error can't find the container with id 6cdceec9fd12c18985479e6a545a0954827ff85276c0958a40d4306e976a1577 Dec 05 07:07:33 crc kubenswrapper[4863]: I1205 07:07:33.527420 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lvrb5-config-bd2q2" event={"ID":"85b606ab-8e66-4ede-9e34-3f0e43eb88ac","Type":"ContainerStarted","Data":"6cdceec9fd12c18985479e6a545a0954827ff85276c0958a40d4306e976a1577"} Dec 05 07:07:34 crc kubenswrapper[4863]: I1205 07:07:34.540448 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa"} Dec 05 07:07:34 crc kubenswrapper[4863]: I1205 07:07:34.540889 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe"} Dec 05 07:07:34 crc kubenswrapper[4863]: I1205 07:07:34.540908 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3"} Dec 05 07:07:34 crc kubenswrapper[4863]: I1205 07:07:34.542807 4863 generic.go:334] "Generic (PLEG): container finished" podID="85b606ab-8e66-4ede-9e34-3f0e43eb88ac" containerID="915787baed9210df0b3a0b85372a927e984fffca3f90aa2607803608cc79af28" exitCode=0 Dec 05 07:07:34 crc kubenswrapper[4863]: I1205 07:07:34.542840 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lvrb5-config-bd2q2" event={"ID":"85b606ab-8e66-4ede-9e34-3f0e43eb88ac","Type":"ContainerDied","Data":"915787baed9210df0b3a0b85372a927e984fffca3f90aa2607803608cc79af28"} Dec 05 07:07:37 crc 
kubenswrapper[4863]: I1205 07:07:37.161779 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-lvrb5" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.426686 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.485839 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.770399 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-6zdth"] Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.771360 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.790630 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6zdth"] Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.895068 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-gr997"] Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.896365 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gr997" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.920526 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g6ml\" (UniqueName: \"kubernetes.io/projected/3502bece-8818-4d62-8d27-5ec406844377-kube-api-access-4g6ml\") pod \"cinder-db-create-6zdth\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.923936 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3502bece-8818-4d62-8d27-5ec406844377-operator-scripts\") pod \"cinder-db-create-6zdth\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.924061 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6b9e-account-create-update-cqtwp"] Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.925867 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.975611 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 07:07:39 crc kubenswrapper[4863]: I1205 07:07:39.989252 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-gr997"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.016545 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6b9e-account-create-update-cqtwp"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.035969 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3502bece-8818-4d62-8d27-5ec406844377-operator-scripts\") pod \"cinder-db-create-6zdth\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.036084 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/695c91cf-5f69-4818-a1c1-e9739b247eca-operator-scripts\") pod \"barbican-db-create-gr997\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " pod="openstack/barbican-db-create-gr997" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.036120 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5dr5\" (UniqueName: \"kubernetes.io/projected/de4b625d-aaf6-497a-b88a-857054ba84e9-kube-api-access-w5dr5\") pod \"barbican-6b9e-account-create-update-cqtwp\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.036160 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g6ml\" (UniqueName: \"kubernetes.io/projected/3502bece-8818-4d62-8d27-5ec406844377-kube-api-access-4g6ml\") pod \"cinder-db-create-6zdth\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.036199 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-596w9\" (UniqueName: \"kubernetes.io/projected/695c91cf-5f69-4818-a1c1-e9739b247eca-kube-api-access-596w9\") pod \"barbican-db-create-gr997\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " pod="openstack/barbican-db-create-gr997" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.036261 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4b625d-aaf6-497a-b88a-857054ba84e9-operator-scripts\") pod \"barbican-6b9e-account-create-update-cqtwp\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.037093 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3502bece-8818-4d62-8d27-5ec406844377-operator-scripts\") pod \"cinder-db-create-6zdth\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.073097 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4g6ml\" (UniqueName: \"kubernetes.io/projected/3502bece-8818-4d62-8d27-5ec406844377-kube-api-access-4g6ml\") pod \"cinder-db-create-6zdth\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.094579 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-d676-account-create-update-nvl4v"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.095941 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.101900 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d676-account-create-update-nvl4v"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.102829 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.103712 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.137236 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/695c91cf-5f69-4818-a1c1-e9739b247eca-operator-scripts\") pod \"barbican-db-create-gr997\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " pod="openstack/barbican-db-create-gr997" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.137282 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5dr5\" (UniqueName: \"kubernetes.io/projected/de4b625d-aaf6-497a-b88a-857054ba84e9-kube-api-access-w5dr5\") pod \"barbican-6b9e-account-create-update-cqtwp\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.137333 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-596w9\" (UniqueName: \"kubernetes.io/projected/695c91cf-5f69-4818-a1c1-e9739b247eca-kube-api-access-596w9\") pod \"barbican-db-create-gr997\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " pod="openstack/barbican-db-create-gr997" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.137378 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4b625d-aaf6-497a-b88a-857054ba84e9-operator-scripts\") pod \"barbican-6b9e-account-create-update-cqtwp\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.138335 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4b625d-aaf6-497a-b88a-857054ba84e9-operator-scripts\") pod \"barbican-6b9e-account-create-update-cqtwp\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.138367 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/695c91cf-5f69-4818-a1c1-e9739b247eca-operator-scripts\") pod \"barbican-db-create-gr997\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " 
pod="openstack/barbican-db-create-gr997" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.156788 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5dr5\" (UniqueName: \"kubernetes.io/projected/de4b625d-aaf6-497a-b88a-857054ba84e9-kube-api-access-w5dr5\") pod \"barbican-6b9e-account-create-update-cqtwp\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.161515 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-596w9\" (UniqueName: \"kubernetes.io/projected/695c91cf-5f69-4818-a1c1-e9739b247eca-kube-api-access-596w9\") pod \"barbican-db-create-gr997\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " pod="openstack/barbican-db-create-gr997" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.235586 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-tjj7d"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.239411 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzr67\" (UniqueName: \"kubernetes.io/projected/d09e490b-58f5-43e0-9500-0272f08fc3af-kube-api-access-tzr67\") pod \"cinder-d676-account-create-update-nvl4v\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.239499 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d09e490b-58f5-43e0-9500-0272f08fc3af-operator-scripts\") pod \"cinder-d676-account-create-update-nvl4v\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.240841 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.242807 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.244681 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-tjj7d"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.247430 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.247503 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.247970 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sbn4r" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.287523 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gr997" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.314094 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.318125 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-xtjxf"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.319308 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.341051 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzr67\" (UniqueName: \"kubernetes.io/projected/d09e490b-58f5-43e0-9500-0272f08fc3af-kube-api-access-tzr67\") pod \"cinder-d676-account-create-update-nvl4v\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.341102 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d09e490b-58f5-43e0-9500-0272f08fc3af-operator-scripts\") pod \"cinder-d676-account-create-update-nvl4v\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.341147 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-combined-ca-bundle\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.341199 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tplg\" (UniqueName: \"kubernetes.io/projected/05b513fa-8a27-433e-85f1-7d252e0e6050-kube-api-access-5tplg\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.341215 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-config-data\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.342034 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d09e490b-58f5-43e0-9500-0272f08fc3af-operator-scripts\") pod \"cinder-d676-account-create-update-nvl4v\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.361629 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzr67\" (UniqueName: \"kubernetes.io/projected/d09e490b-58f5-43e0-9500-0272f08fc3af-kube-api-access-tzr67\") pod \"cinder-d676-account-create-update-nvl4v\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.366926 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xtjxf"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.433309 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.443316 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35b958e2-c440-43c3-b09c-615cf1ebf331-operator-scripts\") pod \"neutron-db-create-xtjxf\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.443379 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-combined-ca-bundle\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.443423 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x972k\" (UniqueName: \"kubernetes.io/projected/35b958e2-c440-43c3-b09c-615cf1ebf331-kube-api-access-x972k\") pod \"neutron-db-create-xtjxf\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.443459 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tplg\" (UniqueName: \"kubernetes.io/projected/05b513fa-8a27-433e-85f1-7d252e0e6050-kube-api-access-5tplg\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.443504 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-config-data\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.447350 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-combined-ca-bundle\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.449122 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-config-data\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.464696 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tplg\" (UniqueName: \"kubernetes.io/projected/05b513fa-8a27-433e-85f1-7d252e0e6050-kube-api-access-5tplg\") pod \"keystone-db-sync-tjj7d\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.485689 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-adda-account-create-update-hj2tr"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.486815 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.489890 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.499303 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-adda-account-create-update-hj2tr"] Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.545543 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35b958e2-c440-43c3-b09c-615cf1ebf331-operator-scripts\") pod \"neutron-db-create-xtjxf\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.545628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x972k\" (UniqueName: \"kubernetes.io/projected/35b958e2-c440-43c3-b09c-615cf1ebf331-kube-api-access-x972k\") pod \"neutron-db-create-xtjxf\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.546314 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35b958e2-c440-43c3-b09c-615cf1ebf331-operator-scripts\") pod \"neutron-db-create-xtjxf\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.561325 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.566568 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x972k\" (UniqueName: \"kubernetes.io/projected/35b958e2-c440-43c3-b09c-615cf1ebf331-kube-api-access-x972k\") pod \"neutron-db-create-xtjxf\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.641513 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.646789 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a8da69-afe1-4825-b8ad-ffb693553773-operator-scripts\") pod \"neutron-adda-account-create-update-hj2tr\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.646866 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zghfp\" (UniqueName: \"kubernetes.io/projected/b6a8da69-afe1-4825-b8ad-ffb693553773-kube-api-access-zghfp\") pod \"neutron-adda-account-create-update-hj2tr\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.748791 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a8da69-afe1-4825-b8ad-ffb693553773-operator-scripts\") pod \"neutron-adda-account-create-update-hj2tr\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.748853 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zghfp\" (UniqueName: \"kubernetes.io/projected/b6a8da69-afe1-4825-b8ad-ffb693553773-kube-api-access-zghfp\") pod \"neutron-adda-account-create-update-hj2tr\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.749638 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a8da69-afe1-4825-b8ad-ffb693553773-operator-scripts\") pod \"neutron-adda-account-create-update-hj2tr\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.769281 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zghfp\" (UniqueName: \"kubernetes.io/projected/b6a8da69-afe1-4825-b8ad-ffb693553773-kube-api-access-zghfp\") pod \"neutron-adda-account-create-update-hj2tr\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:40 crc kubenswrapper[4863]: I1205 07:07:40.811925 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:44 crc kubenswrapper[4863]: E1205 07:07:44.250044 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:481073ac9deefb38bbd989aaa8dd7aedb4e0af26017f4883f85fce433380bf63" Dec 05 07:07:44 crc kubenswrapper[4863]: E1205 07:07:44.250679 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:481073ac9deefb38bbd989aaa8dd7aedb4e0af26017f4883f85fce433380bf63,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lgv5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-4xpt9_openstack(a8b5f081-158a-4d6c-a16f-c1b90548ee63): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:07:44 crc kubenswrapper[4863]: E1205 07:07:44.251752 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-4xpt9" podUID="a8b5f081-158a-4d6c-a16f-c1b90548ee63" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.407455 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.510206 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55z7j\" (UniqueName: \"kubernetes.io/projected/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-kube-api-access-55z7j\") pod \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.510255 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run\") pod \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.510274 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-additional-scripts\") pod \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.510395 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run-ovn\") pod \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.510425 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-log-ovn\") pod \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.510694 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-scripts\") pod \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\" (UID: \"85b606ab-8e66-4ede-9e34-3f0e43eb88ac\") " Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.511643 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "85b606ab-8e66-4ede-9e34-3f0e43eb88ac" (UID: "85b606ab-8e66-4ede-9e34-3f0e43eb88ac"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.511758 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-scripts" (OuterVolumeSpecName: "scripts") pod "85b606ab-8e66-4ede-9e34-3f0e43eb88ac" (UID: "85b606ab-8e66-4ede-9e34-3f0e43eb88ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.511784 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "85b606ab-8e66-4ede-9e34-3f0e43eb88ac" (UID: "85b606ab-8e66-4ede-9e34-3f0e43eb88ac"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.511800 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "85b606ab-8e66-4ede-9e34-3f0e43eb88ac" (UID: "85b606ab-8e66-4ede-9e34-3f0e43eb88ac"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.511814 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run" (OuterVolumeSpecName: "var-run") pod "85b606ab-8e66-4ede-9e34-3f0e43eb88ac" (UID: "85b606ab-8e66-4ede-9e34-3f0e43eb88ac"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.522243 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-kube-api-access-55z7j" (OuterVolumeSpecName: "kube-api-access-55z7j") pod "85b606ab-8e66-4ede-9e34-3f0e43eb88ac" (UID: "85b606ab-8e66-4ede-9e34-3f0e43eb88ac"). InnerVolumeSpecName "kube-api-access-55z7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.617505 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.617545 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55z7j\" (UniqueName: \"kubernetes.io/projected/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-kube-api-access-55z7j\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.617560 4863 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.617571 4863 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.617583 4863 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.617594 4863 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/85b606ab-8e66-4ede-9e34-3f0e43eb88ac-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.634303 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lvrb5-config-bd2q2" event={"ID":"85b606ab-8e66-4ede-9e34-3f0e43eb88ac","Type":"ContainerDied","Data":"6cdceec9fd12c18985479e6a545a0954827ff85276c0958a40d4306e976a1577"} Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.634343 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cdceec9fd12c18985479e6a545a0954827ff85276c0958a40d4306e976a1577" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 
07:07:44.634338 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lvrb5-config-bd2q2" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.637253 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354"} Dec 05 07:07:44 crc kubenswrapper[4863]: E1205 07:07:44.638755 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:481073ac9deefb38bbd989aaa8dd7aedb4e0af26017f4883f85fce433380bf63\\\"\"" pod="openstack/glance-db-sync-4xpt9" podUID="a8b5f081-158a-4d6c-a16f-c1b90548ee63" Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.835916 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-gr997"] Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.851885 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6zdth"] Dec 05 07:07:44 crc kubenswrapper[4863]: W1205 07:07:44.860251 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3502bece_8818_4d62_8d27_5ec406844377.slice/crio-7925e6686798f82b67974c384989f5f03a0951a5244b681d7dd9bfeada8ee22b WatchSource:0}: Error finding container 7925e6686798f82b67974c384989f5f03a0951a5244b681d7dd9bfeada8ee22b: Status 404 returned error can't find the container with id 7925e6686798f82b67974c384989f5f03a0951a5244b681d7dd9bfeada8ee22b Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.875215 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xtjxf"] Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.964413 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d676-account-create-update-nvl4v"] Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.975755 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-adda-account-create-update-hj2tr"] Dec 05 07:07:44 crc kubenswrapper[4863]: W1205 07:07:44.983933 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd09e490b_58f5_43e0_9500_0272f08fc3af.slice/crio-becb4198ba49a11292a0f679671f4fca493a78107cf9706c90756b7a8b1c4932 WatchSource:0}: Error finding container becb4198ba49a11292a0f679671f4fca493a78107cf9706c90756b7a8b1c4932: Status 404 returned error can't find the container with id becb4198ba49a11292a0f679671f4fca493a78107cf9706c90756b7a8b1c4932 Dec 05 07:07:44 crc kubenswrapper[4863]: W1205 07:07:44.985035 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6a8da69_afe1_4825_b8ad_ffb693553773.slice/crio-a58d1ddd31a17b5e350f430cbc50b601e72ba33ccefd048f24fc141208247e01 WatchSource:0}: Error finding container a58d1ddd31a17b5e350f430cbc50b601e72ba33ccefd048f24fc141208247e01: Status 404 returned error can't find the container with id a58d1ddd31a17b5e350f430cbc50b601e72ba33ccefd048f24fc141208247e01 Dec 05 07:07:44 crc kubenswrapper[4863]: W1205 07:07:44.985966 4863 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde4b625d_aaf6_497a_b88a_857054ba84e9.slice/crio-c73a0f46540e3de5a0102dae3c039c37cfdabc35fcde391e91f471df905d64df WatchSource:0}: Error finding container c73a0f46540e3de5a0102dae3c039c37cfdabc35fcde391e91f471df905d64df: Status 404 returned error can't find the container with id c73a0f46540e3de5a0102dae3c039c37cfdabc35fcde391e91f471df905d64df Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.987100 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-tjj7d"] Dec 05 07:07:44 crc kubenswrapper[4863]: I1205 07:07:44.997453 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6b9e-account-create-update-cqtwp"] Dec 05 07:07:44 crc kubenswrapper[4863]: W1205 07:07:44.997703 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05b513fa_8a27_433e_85f1_7d252e0e6050.slice/crio-469f15a5f31f2e0efba1ce4e3334a45e19ba5167ab630608cb78b19ac1a17414 WatchSource:0}: Error finding container 469f15a5f31f2e0efba1ce4e3334a45e19ba5167ab630608cb78b19ac1a17414: Status 404 returned error can't find the container with id 469f15a5f31f2e0efba1ce4e3334a45e19ba5167ab630608cb78b19ac1a17414 Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.507459 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lvrb5-config-bd2q2"] Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.515488 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lvrb5-config-bd2q2"] Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.648849 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-tjj7d" event={"ID":"05b513fa-8a27-433e-85f1-7d252e0e6050","Type":"ContainerStarted","Data":"469f15a5f31f2e0efba1ce4e3334a45e19ba5167ab630608cb78b19ac1a17414"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.651386 4863 generic.go:334] "Generic (PLEG): container finished" podID="d09e490b-58f5-43e0-9500-0272f08fc3af" containerID="38f5b53c0ea567de2f8857aaa96ae6751e0cd07901f4dfd6bb2131fde7c572fe" exitCode=0 Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.651420 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d676-account-create-update-nvl4v" event={"ID":"d09e490b-58f5-43e0-9500-0272f08fc3af","Type":"ContainerDied","Data":"38f5b53c0ea567de2f8857aaa96ae6751e0cd07901f4dfd6bb2131fde7c572fe"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.651450 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d676-account-create-update-nvl4v" event={"ID":"d09e490b-58f5-43e0-9500-0272f08fc3af","Type":"ContainerStarted","Data":"becb4198ba49a11292a0f679671f4fca493a78107cf9706c90756b7a8b1c4932"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.653261 4863 generic.go:334] "Generic (PLEG): container finished" podID="695c91cf-5f69-4818-a1c1-e9739b247eca" containerID="9435f9db1ba045d9fc2cc4233222d9272b7c1a41bdd6a6478bbbfa56c7ac0d85" exitCode=0 Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.653366 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gr997" event={"ID":"695c91cf-5f69-4818-a1c1-e9739b247eca","Type":"ContainerDied","Data":"9435f9db1ba045d9fc2cc4233222d9272b7c1a41bdd6a6478bbbfa56c7ac0d85"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.653394 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-db-create-gr997" event={"ID":"695c91cf-5f69-4818-a1c1-e9739b247eca","Type":"ContainerStarted","Data":"3c4bb897fa4c3443f0f43acfebec15b0678317750e27be321cefa98f8b7c844c"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.654968 4863 generic.go:334] "Generic (PLEG): container finished" podID="de4b625d-aaf6-497a-b88a-857054ba84e9" containerID="87c7fad0a7fa0245af49dd158e6d71d4d21f77fa1b4efce62c830b18c18b21c2" exitCode=0 Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.655043 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6b9e-account-create-update-cqtwp" event={"ID":"de4b625d-aaf6-497a-b88a-857054ba84e9","Type":"ContainerDied","Data":"87c7fad0a7fa0245af49dd158e6d71d4d21f77fa1b4efce62c830b18c18b21c2"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.655076 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6b9e-account-create-update-cqtwp" event={"ID":"de4b625d-aaf6-497a-b88a-857054ba84e9","Type":"ContainerStarted","Data":"c73a0f46540e3de5a0102dae3c039c37cfdabc35fcde391e91f471df905d64df"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.660107 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6a8da69-afe1-4825-b8ad-ffb693553773" containerID="2e41facbcb2249f288be83cb87bf2c0afc4974060007e5ece212e32278aa6b28" exitCode=0 Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.660191 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-adda-account-create-update-hj2tr" event={"ID":"b6a8da69-afe1-4825-b8ad-ffb693553773","Type":"ContainerDied","Data":"2e41facbcb2249f288be83cb87bf2c0afc4974060007e5ece212e32278aa6b28"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.660230 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-adda-account-create-update-hj2tr" event={"ID":"b6a8da69-afe1-4825-b8ad-ffb693553773","Type":"ContainerStarted","Data":"a58d1ddd31a17b5e350f430cbc50b601e72ba33ccefd048f24fc141208247e01"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.661882 4863 generic.go:334] "Generic (PLEG): container finished" podID="3502bece-8818-4d62-8d27-5ec406844377" containerID="e819614128145f00ec465bc6c91fbc37d32c0edf71454158f564f3b502d8e840" exitCode=0 Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.661917 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6zdth" event={"ID":"3502bece-8818-4d62-8d27-5ec406844377","Type":"ContainerDied","Data":"e819614128145f00ec465bc6c91fbc37d32c0edf71454158f564f3b502d8e840"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.661956 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6zdth" event={"ID":"3502bece-8818-4d62-8d27-5ec406844377","Type":"ContainerStarted","Data":"7925e6686798f82b67974c384989f5f03a0951a5244b681d7dd9bfeada8ee22b"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.663515 4863 generic.go:334] "Generic (PLEG): container finished" podID="35b958e2-c440-43c3-b09c-615cf1ebf331" containerID="4e621dd30b80605ad9f283015de3b8a62f9df034896f0c16defc57d09b73ac8b" exitCode=0 Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.663551 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xtjxf" event={"ID":"35b958e2-c440-43c3-b09c-615cf1ebf331","Type":"ContainerDied","Data":"4e621dd30b80605ad9f283015de3b8a62f9df034896f0c16defc57d09b73ac8b"} Dec 05 07:07:45 crc kubenswrapper[4863]: I1205 07:07:45.663574 4863 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/neutron-db-create-xtjxf" event={"ID":"35b958e2-c440-43c3-b09c-615cf1ebf331","Type":"ContainerStarted","Data":"20198e9860044f8500709638cc21c49c44f61f67dab760e9fa4effc5fafdea9d"} Dec 05 07:07:46 crc kubenswrapper[4863]: I1205 07:07:46.628099 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85b606ab-8e66-4ede-9e34-3f0e43eb88ac" path="/var/lib/kubelet/pods/85b606ab-8e66-4ede-9e34-3f0e43eb88ac/volumes" Dec 05 07:07:46 crc kubenswrapper[4863]: I1205 07:07:46.679128 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc"} Dec 05 07:07:46 crc kubenswrapper[4863]: I1205 07:07:46.679190 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba"} Dec 05 07:07:46 crc kubenswrapper[4863]: I1205 07:07:46.679208 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468"} Dec 05 07:07:46 crc kubenswrapper[4863]: I1205 07:07:46.679234 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7"} Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.065467 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.170020 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35b958e2-c440-43c3-b09c-615cf1ebf331-operator-scripts\") pod \"35b958e2-c440-43c3-b09c-615cf1ebf331\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.170492 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x972k\" (UniqueName: \"kubernetes.io/projected/35b958e2-c440-43c3-b09c-615cf1ebf331-kube-api-access-x972k\") pod \"35b958e2-c440-43c3-b09c-615cf1ebf331\" (UID: \"35b958e2-c440-43c3-b09c-615cf1ebf331\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.172069 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35b958e2-c440-43c3-b09c-615cf1ebf331-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "35b958e2-c440-43c3-b09c-615cf1ebf331" (UID: "35b958e2-c440-43c3-b09c-615cf1ebf331"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.186026 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b958e2-c440-43c3-b09c-615cf1ebf331-kube-api-access-x972k" (OuterVolumeSpecName: "kube-api-access-x972k") pod "35b958e2-c440-43c3-b09c-615cf1ebf331" (UID: "35b958e2-c440-43c3-b09c-615cf1ebf331"). InnerVolumeSpecName "kube-api-access-x972k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.196704 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.230814 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.272996 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzr67\" (UniqueName: \"kubernetes.io/projected/d09e490b-58f5-43e0-9500-0272f08fc3af-kube-api-access-tzr67\") pod \"d09e490b-58f5-43e0-9500-0272f08fc3af\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.273309 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d09e490b-58f5-43e0-9500-0272f08fc3af-operator-scripts\") pod \"d09e490b-58f5-43e0-9500-0272f08fc3af\" (UID: \"d09e490b-58f5-43e0-9500-0272f08fc3af\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.273865 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x972k\" (UniqueName: \"kubernetes.io/projected/35b958e2-c440-43c3-b09c-615cf1ebf331-kube-api-access-x972k\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.273886 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35b958e2-c440-43c3-b09c-615cf1ebf331-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.274339 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d09e490b-58f5-43e0-9500-0272f08fc3af-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d09e490b-58f5-43e0-9500-0272f08fc3af" (UID: "d09e490b-58f5-43e0-9500-0272f08fc3af"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.295807 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d09e490b-58f5-43e0-9500-0272f08fc3af-kube-api-access-tzr67" (OuterVolumeSpecName: "kube-api-access-tzr67") pod "d09e490b-58f5-43e0-9500-0272f08fc3af" (UID: "d09e490b-58f5-43e0-9500-0272f08fc3af"). InnerVolumeSpecName "kube-api-access-tzr67". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.324285 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.375103 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4b625d-aaf6-497a-b88a-857054ba84e9-operator-scripts\") pod \"de4b625d-aaf6-497a-b88a-857054ba84e9\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.375326 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5dr5\" (UniqueName: \"kubernetes.io/projected/de4b625d-aaf6-497a-b88a-857054ba84e9-kube-api-access-w5dr5\") pod \"de4b625d-aaf6-497a-b88a-857054ba84e9\" (UID: \"de4b625d-aaf6-497a-b88a-857054ba84e9\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.375773 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzr67\" (UniqueName: \"kubernetes.io/projected/d09e490b-58f5-43e0-9500-0272f08fc3af-kube-api-access-tzr67\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.375799 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d09e490b-58f5-43e0-9500-0272f08fc3af-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.377961 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de4b625d-aaf6-497a-b88a-857054ba84e9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de4b625d-aaf6-497a-b88a-857054ba84e9" (UID: "de4b625d-aaf6-497a-b88a-857054ba84e9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.381633 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de4b625d-aaf6-497a-b88a-857054ba84e9-kube-api-access-w5dr5" (OuterVolumeSpecName: "kube-api-access-w5dr5") pod "de4b625d-aaf6-497a-b88a-857054ba84e9" (UID: "de4b625d-aaf6-497a-b88a-857054ba84e9"). InnerVolumeSpecName "kube-api-access-w5dr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.412904 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.417906 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-gr997" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.477156 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zghfp\" (UniqueName: \"kubernetes.io/projected/b6a8da69-afe1-4825-b8ad-ffb693553773-kube-api-access-zghfp\") pod \"b6a8da69-afe1-4825-b8ad-ffb693553773\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.477230 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a8da69-afe1-4825-b8ad-ffb693553773-operator-scripts\") pod \"b6a8da69-afe1-4825-b8ad-ffb693553773\" (UID: \"b6a8da69-afe1-4825-b8ad-ffb693553773\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.479251 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6a8da69-afe1-4825-b8ad-ffb693553773-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b6a8da69-afe1-4825-b8ad-ffb693553773" (UID: "b6a8da69-afe1-4825-b8ad-ffb693553773"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.485596 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6a8da69-afe1-4825-b8ad-ffb693553773-kube-api-access-zghfp" (OuterVolumeSpecName: "kube-api-access-zghfp") pod "b6a8da69-afe1-4825-b8ad-ffb693553773" (UID: "b6a8da69-afe1-4825-b8ad-ffb693553773"). InnerVolumeSpecName "kube-api-access-zghfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.499205 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zghfp\" (UniqueName: \"kubernetes.io/projected/b6a8da69-afe1-4825-b8ad-ffb693553773-kube-api-access-zghfp\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.499244 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5dr5\" (UniqueName: \"kubernetes.io/projected/de4b625d-aaf6-497a-b88a-857054ba84e9-kube-api-access-w5dr5\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.499265 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a8da69-afe1-4825-b8ad-ffb693553773-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.499276 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4b625d-aaf6-497a-b88a-857054ba84e9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.600410 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g6ml\" (UniqueName: \"kubernetes.io/projected/3502bece-8818-4d62-8d27-5ec406844377-kube-api-access-4g6ml\") pod \"3502bece-8818-4d62-8d27-5ec406844377\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.600533 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3502bece-8818-4d62-8d27-5ec406844377-operator-scripts\") pod \"3502bece-8818-4d62-8d27-5ec406844377\" (UID: \"3502bece-8818-4d62-8d27-5ec406844377\") " Dec 05 07:07:47 
crc kubenswrapper[4863]: I1205 07:07:47.600827 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/695c91cf-5f69-4818-a1c1-e9739b247eca-operator-scripts\") pod \"695c91cf-5f69-4818-a1c1-e9739b247eca\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.600846 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-596w9\" (UniqueName: \"kubernetes.io/projected/695c91cf-5f69-4818-a1c1-e9739b247eca-kube-api-access-596w9\") pod \"695c91cf-5f69-4818-a1c1-e9739b247eca\" (UID: \"695c91cf-5f69-4818-a1c1-e9739b247eca\") " Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.601186 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3502bece-8818-4d62-8d27-5ec406844377-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3502bece-8818-4d62-8d27-5ec406844377" (UID: "3502bece-8818-4d62-8d27-5ec406844377"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.601252 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/695c91cf-5f69-4818-a1c1-e9739b247eca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "695c91cf-5f69-4818-a1c1-e9739b247eca" (UID: "695c91cf-5f69-4818-a1c1-e9739b247eca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.603634 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3502bece-8818-4d62-8d27-5ec406844377-kube-api-access-4g6ml" (OuterVolumeSpecName: "kube-api-access-4g6ml") pod "3502bece-8818-4d62-8d27-5ec406844377" (UID: "3502bece-8818-4d62-8d27-5ec406844377"). InnerVolumeSpecName "kube-api-access-4g6ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.604689 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/695c91cf-5f69-4818-a1c1-e9739b247eca-kube-api-access-596w9" (OuterVolumeSpecName: "kube-api-access-596w9") pod "695c91cf-5f69-4818-a1c1-e9739b247eca" (UID: "695c91cf-5f69-4818-a1c1-e9739b247eca"). InnerVolumeSpecName "kube-api-access-596w9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.687591 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d676-account-create-update-nvl4v" event={"ID":"d09e490b-58f5-43e0-9500-0272f08fc3af","Type":"ContainerDied","Data":"becb4198ba49a11292a0f679671f4fca493a78107cf9706c90756b7a8b1c4932"} Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.687637 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="becb4198ba49a11292a0f679671f4fca493a78107cf9706c90756b7a8b1c4932" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.687609 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-d676-account-create-update-nvl4v" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.689454 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6b9e-account-create-update-cqtwp" event={"ID":"de4b625d-aaf6-497a-b88a-857054ba84e9","Type":"ContainerDied","Data":"c73a0f46540e3de5a0102dae3c039c37cfdabc35fcde391e91f471df905d64df"} Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.689494 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c73a0f46540e3de5a0102dae3c039c37cfdabc35fcde391e91f471df905d64df" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.689460 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6b9e-account-create-update-cqtwp" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.692647 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gr997" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.692677 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gr997" event={"ID":"695c91cf-5f69-4818-a1c1-e9739b247eca","Type":"ContainerDied","Data":"3c4bb897fa4c3443f0f43acfebec15b0678317750e27be321cefa98f8b7c844c"} Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.692725 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c4bb897fa4c3443f0f43acfebec15b0678317750e27be321cefa98f8b7c844c" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.694190 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-adda-account-create-update-hj2tr" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.694205 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-adda-account-create-update-hj2tr" event={"ID":"b6a8da69-afe1-4825-b8ad-ffb693553773","Type":"ContainerDied","Data":"a58d1ddd31a17b5e350f430cbc50b601e72ba33ccefd048f24fc141208247e01"} Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.694253 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a58d1ddd31a17b5e350f430cbc50b601e72ba33ccefd048f24fc141208247e01" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.695716 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6zdth" event={"ID":"3502bece-8818-4d62-8d27-5ec406844377","Type":"ContainerDied","Data":"7925e6686798f82b67974c384989f5f03a0951a5244b681d7dd9bfeada8ee22b"} Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.695737 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7925e6686798f82b67974c384989f5f03a0951a5244b681d7dd9bfeada8ee22b" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.695792 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-6zdth" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.702445 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3502bece-8818-4d62-8d27-5ec406844377-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.702490 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/695c91cf-5f69-4818-a1c1-e9739b247eca-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.702503 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-596w9\" (UniqueName: \"kubernetes.io/projected/695c91cf-5f69-4818-a1c1-e9739b247eca-kube-api-access-596w9\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.702517 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g6ml\" (UniqueName: \"kubernetes.io/projected/3502bece-8818-4d62-8d27-5ec406844377-kube-api-access-4g6ml\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.703155 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xtjxf" event={"ID":"35b958e2-c440-43c3-b09c-615cf1ebf331","Type":"ContainerDied","Data":"20198e9860044f8500709638cc21c49c44f61f67dab760e9fa4effc5fafdea9d"} Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.703195 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xtjxf" Dec 05 07:07:47 crc kubenswrapper[4863]: I1205 07:07:47.703195 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20198e9860044f8500709638cc21c49c44f61f67dab760e9fa4effc5fafdea9d" Dec 05 07:07:51 crc kubenswrapper[4863]: I1205 07:07:51.743201 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-tjj7d" event={"ID":"05b513fa-8a27-433e-85f1-7d252e0e6050","Type":"ContainerStarted","Data":"36c4112304fcce868336454395c84be64360e8c50b269d0f5e8ce5d1e343c821"} Dec 05 07:07:51 crc kubenswrapper[4863]: I1205 07:07:51.757058 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311"} Dec 05 07:07:51 crc kubenswrapper[4863]: I1205 07:07:51.757102 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164"} Dec 05 07:07:51 crc kubenswrapper[4863]: I1205 07:07:51.772461 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-tjj7d" podStartSLOduration=5.613104508 podStartE2EDuration="11.772427903s" podCreationTimestamp="2025-12-05 07:07:40 +0000 UTC" firstStartedPulling="2025-12-05 07:07:45.029440381 +0000 UTC m=+1292.755437411" lastFinishedPulling="2025-12-05 07:07:51.188763766 +0000 UTC m=+1298.914760806" observedRunningTime="2025-12-05 07:07:51.770899577 +0000 UTC m=+1299.496896617" watchObservedRunningTime="2025-12-05 07:07:51.772427903 +0000 UTC m=+1299.498424943" Dec 05 07:07:52 crc kubenswrapper[4863]: I1205 07:07:52.770239 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde"} Dec 05 07:07:52 crc kubenswrapper[4863]: I1205 07:07:52.770498 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab"} Dec 05 07:07:53 crc kubenswrapper[4863]: I1205 07:07:53.785916 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd"} Dec 05 07:07:53 crc kubenswrapper[4863]: I1205 07:07:53.786609 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5"} Dec 05 07:07:53 crc kubenswrapper[4863]: I1205 07:07:53.786636 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerStarted","Data":"84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a"} Dec 05 07:07:53 crc kubenswrapper[4863]: I1205 07:07:53.834980 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.974865609 podStartE2EDuration="39.834960762s" podCreationTimestamp="2025-12-05 07:07:14 +0000 UTC" firstStartedPulling="2025-12-05 07:07:32.324342119 +0000 UTC m=+1280.050339159" lastFinishedPulling="2025-12-05 07:07:51.184437282 +0000 UTC m=+1298.910434312" observedRunningTime="2025-12-05 07:07:53.834556833 +0000 UTC m=+1301.560553903" watchObservedRunningTime="2025-12-05 07:07:53.834960762 +0000 UTC m=+1301.560957812" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123293 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-779c5847bc-w62hz"] Dec 05 07:07:54 crc kubenswrapper[4863]: E1205 07:07:54.123760 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de4b625d-aaf6-497a-b88a-857054ba84e9" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123781 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="de4b625d-aaf6-497a-b88a-857054ba84e9" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: E1205 07:07:54.123804 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3502bece-8818-4d62-8d27-5ec406844377" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123812 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3502bece-8818-4d62-8d27-5ec406844377" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: E1205 07:07:54.123827 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6a8da69-afe1-4825-b8ad-ffb693553773" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123836 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6a8da69-afe1-4825-b8ad-ffb693553773" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: E1205 07:07:54.123851 4863 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="85b606ab-8e66-4ede-9e34-3f0e43eb88ac" containerName="ovn-config" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123860 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b606ab-8e66-4ede-9e34-3f0e43eb88ac" containerName="ovn-config" Dec 05 07:07:54 crc kubenswrapper[4863]: E1205 07:07:54.123877 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d09e490b-58f5-43e0-9500-0272f08fc3af" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123885 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d09e490b-58f5-43e0-9500-0272f08fc3af" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: E1205 07:07:54.123900 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695c91cf-5f69-4818-a1c1-e9739b247eca" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123907 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="695c91cf-5f69-4818-a1c1-e9739b247eca" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: E1205 07:07:54.123922 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b958e2-c440-43c3-b09c-615cf1ebf331" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.123930 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b958e2-c440-43c3-b09c-615cf1ebf331" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.124106 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d09e490b-58f5-43e0-9500-0272f08fc3af" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.124121 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b606ab-8e66-4ede-9e34-3f0e43eb88ac" containerName="ovn-config" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.124142 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6a8da69-afe1-4825-b8ad-ffb693553773" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.124156 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b958e2-c440-43c3-b09c-615cf1ebf331" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.124168 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="de4b625d-aaf6-497a-b88a-857054ba84e9" containerName="mariadb-account-create-update" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.124180 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3502bece-8818-4d62-8d27-5ec406844377" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.124192 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="695c91cf-5f69-4818-a1c1-e9739b247eca" containerName="mariadb-database-create" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.125290 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.129312 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.139810 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-779c5847bc-w62hz"] Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.223893 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-config\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.224008 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-svc\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.224039 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-swift-storage-0\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.224136 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-sb\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.224187 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-nb\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.224219 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m67m\" (UniqueName: \"kubernetes.io/projected/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-kube-api-access-2m67m\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.325280 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-config\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.325355 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-svc\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " 
pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.325385 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-swift-storage-0\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.325495 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-sb\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.325553 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-nb\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.325586 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m67m\" (UniqueName: \"kubernetes.io/projected/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-kube-api-access-2m67m\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.326618 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-svc\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.326636 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-sb\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.326646 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-nb\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.326617 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-swift-storage-0\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.327198 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-config\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: 
I1205 07:07:54.345344 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m67m\" (UniqueName: \"kubernetes.io/projected/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-kube-api-access-2m67m\") pod \"dnsmasq-dns-779c5847bc-w62hz\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.490513 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.794432 4863 generic.go:334] "Generic (PLEG): container finished" podID="05b513fa-8a27-433e-85f1-7d252e0e6050" containerID="36c4112304fcce868336454395c84be64360e8c50b269d0f5e8ce5d1e343c821" exitCode=0 Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.794528 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-tjj7d" event={"ID":"05b513fa-8a27-433e-85f1-7d252e0e6050","Type":"ContainerDied","Data":"36c4112304fcce868336454395c84be64360e8c50b269d0f5e8ce5d1e343c821"} Dec 05 07:07:54 crc kubenswrapper[4863]: I1205 07:07:54.945308 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-779c5847bc-w62hz"] Dec 05 07:07:54 crc kubenswrapper[4863]: W1205 07:07:54.950929 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c05d0fb_2c0d_4641_89e7_5080a84c47fa.slice/crio-d9ca4a86916d9703580f72d9d87cd1b4c500a102885ec3a8196843a14beaa1f7 WatchSource:0}: Error finding container d9ca4a86916d9703580f72d9d87cd1b4c500a102885ec3a8196843a14beaa1f7: Status 404 returned error can't find the container with id d9ca4a86916d9703580f72d9d87cd1b4c500a102885ec3a8196843a14beaa1f7 Dec 05 07:07:55 crc kubenswrapper[4863]: I1205 07:07:55.803660 4863 generic.go:334] "Generic (PLEG): container finished" podID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerID="ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46" exitCode=0 Dec 05 07:07:55 crc kubenswrapper[4863]: I1205 07:07:55.803724 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" event={"ID":"0c05d0fb-2c0d-4641-89e7-5080a84c47fa","Type":"ContainerDied","Data":"ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46"} Dec 05 07:07:55 crc kubenswrapper[4863]: I1205 07:07:55.804202 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" event={"ID":"0c05d0fb-2c0d-4641-89e7-5080a84c47fa","Type":"ContainerStarted","Data":"d9ca4a86916d9703580f72d9d87cd1b4c500a102885ec3a8196843a14beaa1f7"} Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.116236 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.256721 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tplg\" (UniqueName: \"kubernetes.io/projected/05b513fa-8a27-433e-85f1-7d252e0e6050-kube-api-access-5tplg\") pod \"05b513fa-8a27-433e-85f1-7d252e0e6050\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.256874 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-combined-ca-bundle\") pod \"05b513fa-8a27-433e-85f1-7d252e0e6050\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.256975 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-config-data\") pod \"05b513fa-8a27-433e-85f1-7d252e0e6050\" (UID: \"05b513fa-8a27-433e-85f1-7d252e0e6050\") " Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.261377 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05b513fa-8a27-433e-85f1-7d252e0e6050-kube-api-access-5tplg" (OuterVolumeSpecName: "kube-api-access-5tplg") pod "05b513fa-8a27-433e-85f1-7d252e0e6050" (UID: "05b513fa-8a27-433e-85f1-7d252e0e6050"). InnerVolumeSpecName "kube-api-access-5tplg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.285012 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05b513fa-8a27-433e-85f1-7d252e0e6050" (UID: "05b513fa-8a27-433e-85f1-7d252e0e6050"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.299851 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-config-data" (OuterVolumeSpecName: "config-data") pod "05b513fa-8a27-433e-85f1-7d252e0e6050" (UID: "05b513fa-8a27-433e-85f1-7d252e0e6050"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.359123 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tplg\" (UniqueName: \"kubernetes.io/projected/05b513fa-8a27-433e-85f1-7d252e0e6050-kube-api-access-5tplg\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.359154 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.359163 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b513fa-8a27-433e-85f1-7d252e0e6050-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.813548 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" event={"ID":"0c05d0fb-2c0d-4641-89e7-5080a84c47fa","Type":"ContainerStarted","Data":"ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175"} Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.814038 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.815103 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-tjj7d" event={"ID":"05b513fa-8a27-433e-85f1-7d252e0e6050","Type":"ContainerDied","Data":"469f15a5f31f2e0efba1ce4e3334a45e19ba5167ab630608cb78b19ac1a17414"} Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.815128 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="469f15a5f31f2e0efba1ce4e3334a45e19ba5167ab630608cb78b19ac1a17414" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.815181 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-tjj7d" Dec 05 07:07:56 crc kubenswrapper[4863]: I1205 07:07:56.869448 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" podStartSLOduration=2.869423672 podStartE2EDuration="2.869423672s" podCreationTimestamp="2025-12-05 07:07:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:07:56.862920585 +0000 UTC m=+1304.588917645" watchObservedRunningTime="2025-12-05 07:07:56.869423672 +0000 UTC m=+1304.595420712" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.105446 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-779c5847bc-w62hz"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.126835 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-q6h9m"] Dec 05 07:07:57 crc kubenswrapper[4863]: E1205 07:07:57.130852 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05b513fa-8a27-433e-85f1-7d252e0e6050" containerName="keystone-db-sync" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.130890 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="05b513fa-8a27-433e-85f1-7d252e0e6050" containerName="keystone-db-sync" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.131142 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="05b513fa-8a27-433e-85f1-7d252e0e6050" containerName="keystone-db-sync" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.131898 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.136928 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.137140 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.137296 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sbn4r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.137449 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.137657 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.152391 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q6h9m"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.211661 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78b9b4595f-qq2wz"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.227883 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.264507 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78b9b4595f-qq2wz"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.283761 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-config-data\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.283816 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq4nv\" (UniqueName: \"kubernetes.io/projected/961b1d72-04d4-4b9d-a9f8-576c4c882735-kube-api-access-xq4nv\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.283864 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-combined-ca-bundle\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.283958 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-fernet-keys\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.284029 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-credential-keys\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.284143 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-scripts\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.337996 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-6xkg4"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.339714 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.349188 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.349385 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.356193 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-brj55" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.357915 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-6xkg4"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.380349 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.384401 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.385765 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-config-data\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.385822 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8vv2\" (UniqueName: \"kubernetes.io/projected/33487ee1-3f7b-4829-b531-99cbc9e45888-kube-api-access-s8vv2\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.385850 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq4nv\" (UniqueName: \"kubernetes.io/projected/961b1d72-04d4-4b9d-a9f8-576c4c882735-kube-api-access-xq4nv\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.385889 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-combined-ca-bundle\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.385913 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-fernet-keys\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.385944 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-credential-keys\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.385975 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-nb\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.386025 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-scripts\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.386075 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-config\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.386113 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-sb\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.386136 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-svc\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.386166 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-swift-storage-0\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.396260 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-scripts\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.399159 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-credential-keys\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.401060 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-combined-ca-bundle\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.405733 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.415643 4863 reflector.go:368] Caches 
populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.420441 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-fernet-keys\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.440146 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-config-data\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.467437 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.481765 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq4nv\" (UniqueName: \"kubernetes.io/projected/961b1d72-04d4-4b9d-a9f8-576c4c882735-kube-api-access-xq4nv\") pod \"keystone-bootstrap-q6h9m\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489029 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-sb\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489072 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-svc\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489099 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/83a84e66-3bc2-4629-b251-40287f224f1b-etc-machine-id\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489122 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-swift-storage-0\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489149 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-log-httpd\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489188 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-scripts\") pod \"ceilometer-0\" (UID: 
\"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489211 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489241 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8vv2\" (UniqueName: \"kubernetes.io/projected/33487ee1-3f7b-4829-b531-99cbc9e45888-kube-api-access-s8vv2\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489282 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzmwk\" (UniqueName: \"kubernetes.io/projected/83a84e66-3bc2-4629-b251-40287f224f1b-kube-api-access-dzmwk\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489300 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-run-httpd\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489315 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-config-data\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489335 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-nb\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489349 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-combined-ca-bundle\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489380 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489398 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-config-data\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 
07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489423 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpwv7\" (UniqueName: \"kubernetes.io/projected/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-kube-api-access-lpwv7\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489437 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-scripts\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489455 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-db-sync-config-data\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.489502 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-config\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.490446 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-config\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.491132 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-sb\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.495439 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-nb\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.495544 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-swift-storage-0\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.497035 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-svc\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.497497 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.498715 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-7xsrt"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.499727 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.520011 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2vpsz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.520191 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.520737 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.542905 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8vv2\" (UniqueName: \"kubernetes.io/projected/33487ee1-3f7b-4829-b531-99cbc9e45888-kube-api-access-s8vv2\") pod \"dnsmasq-dns-78b9b4595f-qq2wz\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.560788 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-7xsrt"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.579700 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-vcb5r"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.580599 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-vcb5r"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.580615 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-cfznd"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.581276 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.582000 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.588519 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.595840 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-combined-ca-bundle\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.595885 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-log-httpd\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.595910 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-scripts\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.595928 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596115 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzmwk\" (UniqueName: \"kubernetes.io/projected/83a84e66-3bc2-4629-b251-40287f224f1b-kube-api-access-dzmwk\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596164 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-run-httpd\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596190 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-config-data\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596224 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-combined-ca-bundle\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596289 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596309 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-config\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596330 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-config-data\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596380 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpwv7\" (UniqueName: \"kubernetes.io/projected/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-kube-api-access-lpwv7\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596398 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-scripts\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596434 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-db-sync-config-data\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596506 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn7gf\" (UniqueName: \"kubernetes.io/projected/853fd865-612f-4875-8c38-c6d67e486c0e-kube-api-access-sn7gf\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596603 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/83a84e66-3bc2-4629-b251-40287f224f1b-etc-machine-id\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.596699 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/83a84e66-3bc2-4629-b251-40287f224f1b-etc-machine-id\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.597300 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-run-httpd\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.597922 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.598144 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.598591 4863 reflector.go:368] Caches 
populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-85gv2" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.598733 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fht5c" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.599904 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.600062 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-log-httpd\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.609410 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.613177 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-config-data\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.619924 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-scripts\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.620189 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-db-sync-config-data\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.621900 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-config-data\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.622733 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.630371 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-combined-ca-bundle\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.640062 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-scripts\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") 
" pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.642570 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-cfznd"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.657041 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzmwk\" (UniqueName: \"kubernetes.io/projected/83a84e66-3bc2-4629-b251-40287f224f1b-kube-api-access-dzmwk\") pod \"cinder-db-sync-6xkg4\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.666665 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpwv7\" (UniqueName: \"kubernetes.io/projected/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-kube-api-access-lpwv7\") pod \"ceilometer-0\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.669863 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.686732 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78b9b4595f-qq2wz"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.698725 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-db-sync-config-data\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.698861 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpt7b\" (UniqueName: \"kubernetes.io/projected/9723e99f-fe23-4e42-a037-c31bf410036a-kube-api-access-vpt7b\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.698930 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s75ws\" (UniqueName: \"kubernetes.io/projected/63784fd1-baa1-4334-96cf-b1467c661030-kube-api-access-s75ws\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.698974 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-combined-ca-bundle\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.699034 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-config\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.699111 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-config-data\") pod 
\"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.699178 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63784fd1-baa1-4334-96cf-b1467c661030-logs\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.699242 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn7gf\" (UniqueName: \"kubernetes.io/projected/853fd865-612f-4875-8c38-c6d67e486c0e-kube-api-access-sn7gf\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.699389 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-scripts\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.699420 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-combined-ca-bundle\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.699456 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-combined-ca-bundle\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.709492 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-combined-ca-bundle\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.711542 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-794c6877f7-fqn6d"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.714557 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-config\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.716355 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.726121 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-794c6877f7-fqn6d"] Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.727226 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn7gf\" (UniqueName: \"kubernetes.io/projected/853fd865-612f-4875-8c38-c6d67e486c0e-kube-api-access-sn7gf\") pod \"neutron-db-sync-7xsrt\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.801901 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-scripts\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.801942 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-config\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.801964 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-combined-ca-bundle\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.801979 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-sb\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802015 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-swift-storage-0\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802058 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-db-sync-config-data\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802080 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpt7b\" (UniqueName: \"kubernetes.io/projected/9723e99f-fe23-4e42-a037-c31bf410036a-kube-api-access-vpt7b\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802105 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s75ws\" (UniqueName: 
\"kubernetes.io/projected/63784fd1-baa1-4334-96cf-b1467c661030-kube-api-access-s75ws\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802263 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-combined-ca-bundle\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802288 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-nb\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802321 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-config-data\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802344 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63784fd1-baa1-4334-96cf-b1467c661030-logs\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802363 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh9tz\" (UniqueName: \"kubernetes.io/projected/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-kube-api-access-kh9tz\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802380 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-svc\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.802998 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63784fd1-baa1-4334-96cf-b1467c661030-logs\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.817806 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-config-data\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.818836 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-combined-ca-bundle\") pod \"barbican-db-sync-vcb5r\" (UID: 
\"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.819146 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-db-sync-config-data\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.819257 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-scripts\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.821440 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-combined-ca-bundle\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.821835 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.835967 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s75ws\" (UniqueName: \"kubernetes.io/projected/63784fd1-baa1-4334-96cf-b1467c661030-kube-api-access-s75ws\") pod \"placement-db-sync-cfznd\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:57 crc kubenswrapper[4863]: I1205 07:07:57.853635 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpt7b\" (UniqueName: \"kubernetes.io/projected/9723e99f-fe23-4e42-a037-c31bf410036a-kube-api-access-vpt7b\") pod \"barbican-db-sync-vcb5r\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.906730 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-config\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.906784 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-sb\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.906840 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-swift-storage-0\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.907002 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-nb\") pod 
\"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.907083 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh9tz\" (UniqueName: \"kubernetes.io/projected/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-kube-api-access-kh9tz\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.907107 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-svc\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.907943 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-sb\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.909052 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-config\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.914542 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-nb\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.915250 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-svc\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.924892 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.931566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-swift-storage-0\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.959145 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh9tz\" (UniqueName: \"kubernetes.io/projected/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-kube-api-access-kh9tz\") pod \"dnsmasq-dns-794c6877f7-fqn6d\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:57.998901 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-cfznd" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.055850 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.063370 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78b9b4595f-qq2wz"] Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.068409 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.103779 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q6h9m"] Dec 05 07:07:58 crc kubenswrapper[4863]: W1205 07:07:58.123965 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod961b1d72_04d4_4b9d_a9f8_576c4c882735.slice/crio-d91aabb682f54660620ae5eba57021580e6a1184c175a896889808c3d7ed7125 WatchSource:0}: Error finding container d91aabb682f54660620ae5eba57021580e6a1184c175a896889808c3d7ed7125: Status 404 returned error can't find the container with id d91aabb682f54660620ae5eba57021580e6a1184c175a896889808c3d7ed7125 Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.919160 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q6h9m" event={"ID":"961b1d72-04d4-4b9d-a9f8-576c4c882735","Type":"ContainerStarted","Data":"36eecc4422707dcc6d7f6c623f47769c1397206825994bc4f163e5856994d03b"} Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.920057 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q6h9m" event={"ID":"961b1d72-04d4-4b9d-a9f8-576c4c882735","Type":"ContainerStarted","Data":"d91aabb682f54660620ae5eba57021580e6a1184c175a896889808c3d7ed7125"} Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.921904 4863 generic.go:334] "Generic (PLEG): container finished" podID="33487ee1-3f7b-4829-b531-99cbc9e45888" containerID="3750ededc1df2ac8005c8d570819c5ee1d0c3e7001e77d6cc56948f75c204b8f" exitCode=0 Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.922135 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" podUID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerName="dnsmasq-dns" containerID="cri-o://ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175" gracePeriod=10 Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.922622 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" event={"ID":"33487ee1-3f7b-4829-b531-99cbc9e45888","Type":"ContainerDied","Data":"3750ededc1df2ac8005c8d570819c5ee1d0c3e7001e77d6cc56948f75c204b8f"} Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.922676 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" event={"ID":"33487ee1-3f7b-4829-b531-99cbc9e45888","Type":"ContainerStarted","Data":"cbef156b1591719f12fb8f82893a48c1a102c4069aa8bba8fe7bcf4a9fbee760"} Dec 05 07:07:58 crc kubenswrapper[4863]: I1205 07:07:58.983379 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-q6h9m" podStartSLOduration=1.983357118 podStartE2EDuration="1.983357118s" podCreationTimestamp="2025-12-05 07:07:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-12-05 07:07:58.953227212 +0000 UTC m=+1306.679224252" watchObservedRunningTime="2025-12-05 07:07:58.983357118 +0000 UTC m=+1306.709354158" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.113993 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.121431 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-7xsrt"] Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.143195 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-vcb5r"] Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.154234 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-6xkg4"] Dec 05 07:07:59 crc kubenswrapper[4863]: W1205 07:07:59.180924 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod853fd865_612f_4875_8c38_c6d67e486c0e.slice/crio-41da0698cc1bd1a426bf38b1e42ef58767415dcde44bb75396fc357f0f9c72cd WatchSource:0}: Error finding container 41da0698cc1bd1a426bf38b1e42ef58767415dcde44bb75396fc357f0f9c72cd: Status 404 returned error can't find the container with id 41da0698cc1bd1a426bf38b1e42ef58767415dcde44bb75396fc357f0f9c72cd Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.196402 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-794c6877f7-fqn6d"] Dec 05 07:07:59 crc kubenswrapper[4863]: W1205 07:07:59.200932 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9723e99f_fe23_4e42_a037_c31bf410036a.slice/crio-3f0bbe853efe6900ad41286a52067535321106d77a942318f19d6ad43789afd7 WatchSource:0}: Error finding container 3f0bbe853efe6900ad41286a52067535321106d77a942318f19d6ad43789afd7: Status 404 returned error can't find the container with id 3f0bbe853efe6900ad41286a52067535321106d77a942318f19d6ad43789afd7 Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.206969 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-cfznd"] Dec 05 07:07:59 crc kubenswrapper[4863]: W1205 07:07:59.216566 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63784fd1_baa1_4334_96cf_b1467c661030.slice/crio-0db8a8e019d2b274a2413bbca7c29b0c0db1394c455132d2e1d5600384621697 WatchSource:0}: Error finding container 0db8a8e019d2b274a2413bbca7c29b0c0db1394c455132d2e1d5600384621697: Status 404 returned error can't find the container with id 0db8a8e019d2b274a2413bbca7c29b0c0db1394c455132d2e1d5600384621697 Dec 05 07:07:59 crc kubenswrapper[4863]: W1205 07:07:59.221864 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb21d1baa_e6ea_41dd_b76d_1d5f175476d9.slice/crio-1f1de6373fb51a97c79c2672c0881978b18fe780e98172fbb20ff4381a2bb923 WatchSource:0}: Error finding container 1f1de6373fb51a97c79c2672c0881978b18fe780e98172fbb20ff4381a2bb923: Status 404 returned error can't find the container with id 1f1de6373fb51a97c79c2672c0881978b18fe780e98172fbb20ff4381a2bb923 Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.325127 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.453964 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-config\") pod \"33487ee1-3f7b-4829-b531-99cbc9e45888\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.454023 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-nb\") pod \"33487ee1-3f7b-4829-b531-99cbc9e45888\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.454080 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-svc\") pod \"33487ee1-3f7b-4829-b531-99cbc9e45888\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.454100 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8vv2\" (UniqueName: \"kubernetes.io/projected/33487ee1-3f7b-4829-b531-99cbc9e45888-kube-api-access-s8vv2\") pod \"33487ee1-3f7b-4829-b531-99cbc9e45888\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.454153 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-swift-storage-0\") pod \"33487ee1-3f7b-4829-b531-99cbc9e45888\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.454281 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-sb\") pod \"33487ee1-3f7b-4829-b531-99cbc9e45888\" (UID: \"33487ee1-3f7b-4829-b531-99cbc9e45888\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.474621 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33487ee1-3f7b-4829-b531-99cbc9e45888-kube-api-access-s8vv2" (OuterVolumeSpecName: "kube-api-access-s8vv2") pod "33487ee1-3f7b-4829-b531-99cbc9e45888" (UID: "33487ee1-3f7b-4829-b531-99cbc9e45888"). InnerVolumeSpecName "kube-api-access-s8vv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.492837 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "33487ee1-3f7b-4829-b531-99cbc9e45888" (UID: "33487ee1-3f7b-4829-b531-99cbc9e45888"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.501060 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "33487ee1-3f7b-4829-b531-99cbc9e45888" (UID: "33487ee1-3f7b-4829-b531-99cbc9e45888"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.507256 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "33487ee1-3f7b-4829-b531-99cbc9e45888" (UID: "33487ee1-3f7b-4829-b531-99cbc9e45888"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.509350 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "33487ee1-3f7b-4829-b531-99cbc9e45888" (UID: "33487ee1-3f7b-4829-b531-99cbc9e45888"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.535992 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-config" (OuterVolumeSpecName: "config") pod "33487ee1-3f7b-4829-b531-99cbc9e45888" (UID: "33487ee1-3f7b-4829-b531-99cbc9e45888"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.555763 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.555800 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.555814 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.555827 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.555839 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8vv2\" (UniqueName: \"kubernetes.io/projected/33487ee1-3f7b-4829-b531-99cbc9e45888-kube-api-access-s8vv2\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.555853 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/33487ee1-3f7b-4829-b531-99cbc9e45888-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.622959 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.657355 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-sb\") pod \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.657401 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-nb\") pod \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.657429 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-svc\") pod \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.657450 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m67m\" (UniqueName: \"kubernetes.io/projected/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-kube-api-access-2m67m\") pod \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.657558 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-config\") pod \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.657676 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-swift-storage-0\") pod \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\" (UID: \"0c05d0fb-2c0d-4641-89e7-5080a84c47fa\") " Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.703779 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-kube-api-access-2m67m" (OuterVolumeSpecName: "kube-api-access-2m67m") pod "0c05d0fb-2c0d-4641-89e7-5080a84c47fa" (UID: "0c05d0fb-2c0d-4641-89e7-5080a84c47fa"). InnerVolumeSpecName "kube-api-access-2m67m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.766437 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m67m\" (UniqueName: \"kubernetes.io/projected/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-kube-api-access-2m67m\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.778101 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-config" (OuterVolumeSpecName: "config") pod "0c05d0fb-2c0d-4641-89e7-5080a84c47fa" (UID: "0c05d0fb-2c0d-4641-89e7-5080a84c47fa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.794412 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0c05d0fb-2c0d-4641-89e7-5080a84c47fa" (UID: "0c05d0fb-2c0d-4641-89e7-5080a84c47fa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.799804 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0c05d0fb-2c0d-4641-89e7-5080a84c47fa" (UID: "0c05d0fb-2c0d-4641-89e7-5080a84c47fa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.800221 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0c05d0fb-2c0d-4641-89e7-5080a84c47fa" (UID: "0c05d0fb-2c0d-4641-89e7-5080a84c47fa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.809976 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0c05d0fb-2c0d-4641-89e7-5080a84c47fa" (UID: "0c05d0fb-2c0d-4641-89e7-5080a84c47fa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.823508 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.868451 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.868501 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.868513 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.868522 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.868531 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c05d0fb-2c0d-4641-89e7-5080a84c47fa-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.953785 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6xkg4" 
event={"ID":"83a84e66-3bc2-4629-b251-40287f224f1b","Type":"ContainerStarted","Data":"b0645b3381e4721a674eb2f6fa63ec4bffeea1a92b8a859a5643ce6dbe7cbf54"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.959870 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-7xsrt" event={"ID":"853fd865-612f-4875-8c38-c6d67e486c0e","Type":"ContainerStarted","Data":"867f7cf9e88b1eb78b8dadfb5286d67bf858706a6a5792e5b1e2b0cc60117820"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.959914 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-7xsrt" event={"ID":"853fd865-612f-4875-8c38-c6d67e486c0e","Type":"ContainerStarted","Data":"41da0698cc1bd1a426bf38b1e42ef58767415dcde44bb75396fc357f0f9c72cd"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.970309 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4xpt9" event={"ID":"a8b5f081-158a-4d6c-a16f-c1b90548ee63","Type":"ContainerStarted","Data":"fafe7909477cff71de085fadb4309c5acf55a0ecae93af65a21c8a8035c62191"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.976843 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerStarted","Data":"f39642cbc772739dd8d3082577de0059bff277c370b79bf3d8cb90edc04f2599"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.988025 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vcb5r" event={"ID":"9723e99f-fe23-4e42-a037-c31bf410036a","Type":"ContainerStarted","Data":"3f0bbe853efe6900ad41286a52067535321106d77a942318f19d6ad43789afd7"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.990552 4863 generic.go:334] "Generic (PLEG): container finished" podID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerID="ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175" exitCode=0 Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.990595 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" event={"ID":"0c05d0fb-2c0d-4641-89e7-5080a84c47fa","Type":"ContainerDied","Data":"ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.990646 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" event={"ID":"0c05d0fb-2c0d-4641-89e7-5080a84c47fa","Type":"ContainerDied","Data":"d9ca4a86916d9703580f72d9d87cd1b4c500a102885ec3a8196843a14beaa1f7"} Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.990661 4863 scope.go:117] "RemoveContainer" containerID="ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175" Dec 05 07:07:59 crc kubenswrapper[4863]: I1205 07:07:59.990767 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-779c5847bc-w62hz" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.001332 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-7xsrt" podStartSLOduration=3.001244255 podStartE2EDuration="3.001244255s" podCreationTimestamp="2025-12-05 07:07:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:07:59.982678599 +0000 UTC m=+1307.708675639" watchObservedRunningTime="2025-12-05 07:08:00.001244255 +0000 UTC m=+1307.727241295" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.004465 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.005777 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78b9b4595f-qq2wz" event={"ID":"33487ee1-3f7b-4829-b531-99cbc9e45888","Type":"ContainerDied","Data":"cbef156b1591719f12fb8f82893a48c1a102c4069aa8bba8fe7bcf4a9fbee760"} Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.010048 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cfznd" event={"ID":"63784fd1-baa1-4334-96cf-b1467c661030","Type":"ContainerStarted","Data":"0db8a8e019d2b274a2413bbca7c29b0c0db1394c455132d2e1d5600384621697"} Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.018042 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-4xpt9" podStartSLOduration=2.559327541 podStartE2EDuration="32.018020649s" podCreationTimestamp="2025-12-05 07:07:28 +0000 UTC" firstStartedPulling="2025-12-05 07:07:28.978562577 +0000 UTC m=+1276.704559617" lastFinishedPulling="2025-12-05 07:07:58.437255685 +0000 UTC m=+1306.163252725" observedRunningTime="2025-12-05 07:07:59.999296208 +0000 UTC m=+1307.725293248" watchObservedRunningTime="2025-12-05 07:08:00.018020649 +0000 UTC m=+1307.744017689" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.021044 4863 generic.go:334] "Generic (PLEG): container finished" podID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerID="30ce94e4e8fe29c9f76e65496a2a56a9a11c5957f2156dbc83e7b40c0fadd4f0" exitCode=0 Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.021932 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" event={"ID":"b21d1baa-e6ea-41dd-b76d-1d5f175476d9","Type":"ContainerDied","Data":"30ce94e4e8fe29c9f76e65496a2a56a9a11c5957f2156dbc83e7b40c0fadd4f0"} Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.021963 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" event={"ID":"b21d1baa-e6ea-41dd-b76d-1d5f175476d9","Type":"ContainerStarted","Data":"1f1de6373fb51a97c79c2672c0881978b18fe780e98172fbb20ff4381a2bb923"} Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.121095 4863 scope.go:117] "RemoveContainer" containerID="ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.254206 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-779c5847bc-w62hz"] Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.258877 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-779c5847bc-w62hz"] Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.314861 4863 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/dnsmasq-dns-78b9b4595f-qq2wz"] Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.323297 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78b9b4595f-qq2wz"] Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.326828 4863 scope.go:117] "RemoveContainer" containerID="ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175" Dec 05 07:08:00 crc kubenswrapper[4863]: E1205 07:08:00.332163 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175\": container with ID starting with ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175 not found: ID does not exist" containerID="ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.332204 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175"} err="failed to get container status \"ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175\": rpc error: code = NotFound desc = could not find container \"ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175\": container with ID starting with ae637f27a2c7b37b07e09afbcdf59d0fa29c6acdfdf84c7699d5067e8f2ce175 not found: ID does not exist" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.332233 4863 scope.go:117] "RemoveContainer" containerID="ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46" Dec 05 07:08:00 crc kubenswrapper[4863]: E1205 07:08:00.332620 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46\": container with ID starting with ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46 not found: ID does not exist" containerID="ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.332642 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46"} err="failed to get container status \"ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46\": rpc error: code = NotFound desc = could not find container \"ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46\": container with ID starting with ead3589b3f6a08275a55184e59d01f4be0ab92efc99f9d127f9021a920db1a46 not found: ID does not exist" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.332654 4863 scope.go:117] "RemoveContainer" containerID="3750ededc1df2ac8005c8d570819c5ee1d0c3e7001e77d6cc56948f75c204b8f" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.618362 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" path="/var/lib/kubelet/pods/0c05d0fb-2c0d-4641-89e7-5080a84c47fa/volumes" Dec 05 07:08:00 crc kubenswrapper[4863]: I1205 07:08:00.619390 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33487ee1-3f7b-4829-b531-99cbc9e45888" path="/var/lib/kubelet/pods/33487ee1-3f7b-4829-b531-99cbc9e45888/volumes" Dec 05 07:08:01 crc kubenswrapper[4863]: I1205 07:08:01.039105 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" 
event={"ID":"b21d1baa-e6ea-41dd-b76d-1d5f175476d9","Type":"ContainerStarted","Data":"e228beda85ec5d3d08fcbd997d0c278dbb12848353920c55e183080870ee1094"} Dec 05 07:08:01 crc kubenswrapper[4863]: I1205 07:08:01.071809 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" podStartSLOduration=4.071790629 podStartE2EDuration="4.071790629s" podCreationTimestamp="2025-12-05 07:07:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:01.059212597 +0000 UTC m=+1308.785209637" watchObservedRunningTime="2025-12-05 07:08:01.071790629 +0000 UTC m=+1308.797787689" Dec 05 07:08:02 crc kubenswrapper[4863]: I1205 07:08:02.049418 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:08:06 crc kubenswrapper[4863]: I1205 07:08:06.132154 4863 generic.go:334] "Generic (PLEG): container finished" podID="961b1d72-04d4-4b9d-a9f8-576c4c882735" containerID="36eecc4422707dcc6d7f6c623f47769c1397206825994bc4f163e5856994d03b" exitCode=0 Dec 05 07:08:06 crc kubenswrapper[4863]: I1205 07:08:06.132256 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q6h9m" event={"ID":"961b1d72-04d4-4b9d-a9f8-576c4c882735","Type":"ContainerDied","Data":"36eecc4422707dcc6d7f6c623f47769c1397206825994bc4f163e5856994d03b"} Dec 05 07:08:08 crc kubenswrapper[4863]: I1205 07:08:08.070711 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:08:08 crc kubenswrapper[4863]: I1205 07:08:08.206182 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-w2qjk"] Dec 05 07:08:08 crc kubenswrapper[4863]: I1205 07:08:08.206446 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="dnsmasq-dns" containerID="cri-o://95223dcea418612ecb5a3e3dcc7de79b6b1662ef56c11682aa7a38808c146929" gracePeriod=10 Dec 05 07:08:08 crc kubenswrapper[4863]: I1205 07:08:08.464707 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:08:08 crc kubenswrapper[4863]: I1205 07:08:08.464771 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:08:09 crc kubenswrapper[4863]: I1205 07:08:09.191395 4863 generic.go:334] "Generic (PLEG): container finished" podID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerID="95223dcea418612ecb5a3e3dcc7de79b6b1662ef56c11682aa7a38808c146929" exitCode=0 Dec 05 07:08:09 crc kubenswrapper[4863]: I1205 07:08:09.191446 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" event={"ID":"6cc353f1-a71d-4983-b48f-81ac6140952b","Type":"ContainerDied","Data":"95223dcea418612ecb5a3e3dcc7de79b6b1662ef56c11682aa7a38808c146929"} Dec 05 07:08:09 crc kubenswrapper[4863]: I1205 07:08:09.955517 4863 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.748447 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.774547 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-fernet-keys\") pod \"961b1d72-04d4-4b9d-a9f8-576c4c882735\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.774600 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-credential-keys\") pod \"961b1d72-04d4-4b9d-a9f8-576c4c882735\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.774669 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-combined-ca-bundle\") pod \"961b1d72-04d4-4b9d-a9f8-576c4c882735\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.774717 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-scripts\") pod \"961b1d72-04d4-4b9d-a9f8-576c4c882735\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.774763 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq4nv\" (UniqueName: \"kubernetes.io/projected/961b1d72-04d4-4b9d-a9f8-576c4c882735-kube-api-access-xq4nv\") pod \"961b1d72-04d4-4b9d-a9f8-576c4c882735\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.774914 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-config-data\") pod \"961b1d72-04d4-4b9d-a9f8-576c4c882735\" (UID: \"961b1d72-04d4-4b9d-a9f8-576c4c882735\") " Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.790710 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "961b1d72-04d4-4b9d-a9f8-576c4c882735" (UID: "961b1d72-04d4-4b9d-a9f8-576c4c882735"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.802046 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "961b1d72-04d4-4b9d-a9f8-576c4c882735" (UID: "961b1d72-04d4-4b9d-a9f8-576c4c882735"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.802060 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/961b1d72-04d4-4b9d-a9f8-576c4c882735-kube-api-access-xq4nv" (OuterVolumeSpecName: "kube-api-access-xq4nv") pod "961b1d72-04d4-4b9d-a9f8-576c4c882735" (UID: "961b1d72-04d4-4b9d-a9f8-576c4c882735"). InnerVolumeSpecName "kube-api-access-xq4nv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.805866 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-scripts" (OuterVolumeSpecName: "scripts") pod "961b1d72-04d4-4b9d-a9f8-576c4c882735" (UID: "961b1d72-04d4-4b9d-a9f8-576c4c882735"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.819203 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "961b1d72-04d4-4b9d-a9f8-576c4c882735" (UID: "961b1d72-04d4-4b9d-a9f8-576c4c882735"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.835487 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-config-data" (OuterVolumeSpecName: "config-data") pod "961b1d72-04d4-4b9d-a9f8-576c4c882735" (UID: "961b1d72-04d4-4b9d-a9f8-576c4c882735"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.876728 4863 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.876763 4863 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.876779 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.876790 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.876800 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq4nv\" (UniqueName: \"kubernetes.io/projected/961b1d72-04d4-4b9d-a9f8-576c4c882735-kube-api-access-xq4nv\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:10 crc kubenswrapper[4863]: I1205 07:08:10.876809 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961b1d72-04d4-4b9d-a9f8-576c4c882735-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.211649 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-q6h9m" event={"ID":"961b1d72-04d4-4b9d-a9f8-576c4c882735","Type":"ContainerDied","Data":"d91aabb682f54660620ae5eba57021580e6a1184c175a896889808c3d7ed7125"} Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.211688 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d91aabb682f54660620ae5eba57021580e6a1184c175a896889808c3d7ed7125" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.211722 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q6h9m" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.839575 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-q6h9m"] Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.851115 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-q6h9m"] Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.945839 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dlpwd"] Dec 05 07:08:11 crc kubenswrapper[4863]: E1205 07:08:11.946174 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerName="init" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.946191 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerName="init" Dec 05 07:08:11 crc kubenswrapper[4863]: E1205 07:08:11.946202 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33487ee1-3f7b-4829-b531-99cbc9e45888" containerName="init" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.946208 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="33487ee1-3f7b-4829-b531-99cbc9e45888" containerName="init" Dec 05 07:08:11 crc kubenswrapper[4863]: E1205 07:08:11.946225 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerName="dnsmasq-dns" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.946231 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerName="dnsmasq-dns" Dec 05 07:08:11 crc kubenswrapper[4863]: E1205 07:08:11.946255 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="961b1d72-04d4-4b9d-a9f8-576c4c882735" containerName="keystone-bootstrap" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.946261 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="961b1d72-04d4-4b9d-a9f8-576c4c882735" containerName="keystone-bootstrap" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.948562 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="33487ee1-3f7b-4829-b531-99cbc9e45888" containerName="init" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.948598 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c05d0fb-2c0d-4641-89e7-5080a84c47fa" containerName="dnsmasq-dns" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.948614 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="961b1d72-04d4-4b9d-a9f8-576c4c882735" containerName="keystone-bootstrap" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.949875 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.951615 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.951618 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.952051 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sbn4r" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.952121 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.954370 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.961294 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dlpwd"] Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.998506 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-fernet-keys\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.998578 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-scripts\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.998632 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xcqb\" (UniqueName: \"kubernetes.io/projected/298bab26-0793-4998-a6da-b4df8db6ee59-kube-api-access-4xcqb\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.998663 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-combined-ca-bundle\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.998841 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-config-data\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:11 crc kubenswrapper[4863]: I1205 07:08:11.998962 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-credential-keys\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.101652 4863 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-credential-keys\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.102517 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-fernet-keys\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.102593 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-scripts\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.102774 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xcqb\" (UniqueName: \"kubernetes.io/projected/298bab26-0793-4998-a6da-b4df8db6ee59-kube-api-access-4xcqb\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.102823 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-combined-ca-bundle\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.102873 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-config-data\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.109062 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-credential-keys\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.109224 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-fernet-keys\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.110044 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-combined-ca-bundle\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.110647 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-scripts\") pod \"keystone-bootstrap-dlpwd\" (UID: 
\"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.122280 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-config-data\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.123432 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xcqb\" (UniqueName: \"kubernetes.io/projected/298bab26-0793-4998-a6da-b4df8db6ee59-kube-api-access-4xcqb\") pod \"keystone-bootstrap-dlpwd\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.272311 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:12 crc kubenswrapper[4863]: I1205 07:08:12.623650 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="961b1d72-04d4-4b9d-a9f8-576c4c882735" path="/var/lib/kubelet/pods/961b1d72-04d4-4b9d-a9f8-576c4c882735/volumes" Dec 05 07:08:14 crc kubenswrapper[4863]: I1205 07:08:14.955046 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Dec 05 07:08:16 crc kubenswrapper[4863]: I1205 07:08:16.271423 4863 generic.go:334] "Generic (PLEG): container finished" podID="a8b5f081-158a-4d6c-a16f-c1b90548ee63" containerID="fafe7909477cff71de085fadb4309c5acf55a0ecae93af65a21c8a8035c62191" exitCode=0 Dec 05 07:08:16 crc kubenswrapper[4863]: I1205 07:08:16.271528 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4xpt9" event={"ID":"a8b5f081-158a-4d6c-a16f-c1b90548ee63","Type":"ContainerDied","Data":"fafe7909477cff71de085fadb4309c5acf55a0ecae93af65a21c8a8035c62191"} Dec 05 07:08:19 crc kubenswrapper[4863]: I1205 07:08:19.955724 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Dec 05 07:08:19 crc kubenswrapper[4863]: I1205 07:08:19.956728 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:08:20 crc kubenswrapper[4863]: E1205 07:08:20.564905 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:7a2056615520e272bae43ec3f34e2ba7a92c1d364b8d9106b53bd694619fc9c2" Dec 05 07:08:20 crc kubenswrapper[4863]: E1205 07:08:20.565143 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:7a2056615520e272bae43ec3f34e2ba7a92c1d364b8d9106b53bd694619fc9c2,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dzmwk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-6xkg4_openstack(83a84e66-3bc2-4629-b251-40287f224f1b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:08:20 crc kubenswrapper[4863]: E1205 07:08:20.566402 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-6xkg4" podUID="83a84e66-3bc2-4629-b251-40287f224f1b" Dec 05 07:08:20 crc kubenswrapper[4863]: E1205 07:08:20.917115 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:43a24796dabde68270dbfefa107205e173fdd6a0dc701502858cadbede69da31" Dec 05 07:08:20 crc kubenswrapper[4863]: E1205 07:08:20.917280 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:43a24796dabde68270dbfefa107205e173fdd6a0dc701502858cadbede69da31,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n568h7fh5b5h558h5c8h55h594h556h55dh66fh56dh575h68ch66fh545hfbh687h66ch554h84h556h678hddh67h65h57dhc9h77hc7h569h59bhc4q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lpwv7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(1b1b222d-ab57-4b0f-ade3-2d5f625d4f65): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:08:21 crc kubenswrapper[4863]: E1205 07:08:21.318116 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:7a2056615520e272bae43ec3f34e2ba7a92c1d364b8d9106b53bd694619fc9c2\\\"\"" pod="openstack/cinder-db-sync-6xkg4" podUID="83a84e66-3bc2-4629-b251-40287f224f1b" Dec 05 07:08:21 crc kubenswrapper[4863]: E1205 07:08:21.441764 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:82006b9c64d4c5f80483cda262d960ce6be4813665158ef1a53ea7734bbe431f" Dec 05 07:08:21 crc kubenswrapper[4863]: E1205 07:08:21.441957 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:82006b9c64d4c5f80483cda262d960ce6be4813665158ef1a53ea7734bbe431f,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vpt7b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-vcb5r_openstack(9723e99f-fe23-4e42-a037-c31bf410036a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 07:08:21 crc kubenswrapper[4863]: E1205 07:08:21.443151 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-vcb5r" podUID="9723e99f-fe23-4e42-a037-c31bf410036a" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.628399 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-4xpt9" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.636450 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.660529 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-dns-svc\") pod \"6cc353f1-a71d-4983-b48f-81ac6140952b\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.660812 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-nb\") pod \"6cc353f1-a71d-4983-b48f-81ac6140952b\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.661000 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgv5t\" (UniqueName: \"kubernetes.io/projected/a8b5f081-158a-4d6c-a16f-c1b90548ee63-kube-api-access-lgv5t\") pod \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.661106 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-combined-ca-bundle\") pod \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.661501 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-config\") pod \"6cc353f1-a71d-4983-b48f-81ac6140952b\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.661600 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnkqn\" (UniqueName: \"kubernetes.io/projected/6cc353f1-a71d-4983-b48f-81ac6140952b-kube-api-access-nnkqn\") pod \"6cc353f1-a71d-4983-b48f-81ac6140952b\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.661660 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-sb\") pod \"6cc353f1-a71d-4983-b48f-81ac6140952b\" (UID: \"6cc353f1-a71d-4983-b48f-81ac6140952b\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.661806 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-config-data\") pod \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.661881 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-db-sync-config-data\") pod \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\" (UID: \"a8b5f081-158a-4d6c-a16f-c1b90548ee63\") " Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.666597 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod 
"a8b5f081-158a-4d6c-a16f-c1b90548ee63" (UID: "a8b5f081-158a-4d6c-a16f-c1b90548ee63"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.673055 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8b5f081-158a-4d6c-a16f-c1b90548ee63-kube-api-access-lgv5t" (OuterVolumeSpecName: "kube-api-access-lgv5t") pod "a8b5f081-158a-4d6c-a16f-c1b90548ee63" (UID: "a8b5f081-158a-4d6c-a16f-c1b90548ee63"). InnerVolumeSpecName "kube-api-access-lgv5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.673178 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cc353f1-a71d-4983-b48f-81ac6140952b-kube-api-access-nnkqn" (OuterVolumeSpecName: "kube-api-access-nnkqn") pod "6cc353f1-a71d-4983-b48f-81ac6140952b" (UID: "6cc353f1-a71d-4983-b48f-81ac6140952b"). InnerVolumeSpecName "kube-api-access-nnkqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.710864 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8b5f081-158a-4d6c-a16f-c1b90548ee63" (UID: "a8b5f081-158a-4d6c-a16f-c1b90548ee63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.761694 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6cc353f1-a71d-4983-b48f-81ac6140952b" (UID: "6cc353f1-a71d-4983-b48f-81ac6140952b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.764942 4863 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.764967 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.764979 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgv5t\" (UniqueName: \"kubernetes.io/projected/a8b5f081-158a-4d6c-a16f-c1b90548ee63-kube-api-access-lgv5t\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.764990 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.765000 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnkqn\" (UniqueName: \"kubernetes.io/projected/6cc353f1-a71d-4983-b48f-81ac6140952b-kube-api-access-nnkqn\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.768740 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6cc353f1-a71d-4983-b48f-81ac6140952b" (UID: "6cc353f1-a71d-4983-b48f-81ac6140952b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.774581 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6cc353f1-a71d-4983-b48f-81ac6140952b" (UID: "6cc353f1-a71d-4983-b48f-81ac6140952b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.774720 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-config-data" (OuterVolumeSpecName: "config-data") pod "a8b5f081-158a-4d6c-a16f-c1b90548ee63" (UID: "a8b5f081-158a-4d6c-a16f-c1b90548ee63"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.775178 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-config" (OuterVolumeSpecName: "config") pod "6cc353f1-a71d-4983-b48f-81ac6140952b" (UID: "6cc353f1-a71d-4983-b48f-81ac6140952b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.867118 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.867170 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.867186 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8b5f081-158a-4d6c-a16f-c1b90548ee63-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.867199 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cc353f1-a71d-4983-b48f-81ac6140952b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:21 crc kubenswrapper[4863]: I1205 07:08:21.956049 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dlpwd"] Dec 05 07:08:22 crc kubenswrapper[4863]: W1205 07:08:22.130834 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod298bab26_0793_4998_a6da_b4df8db6ee59.slice/crio-6c28a17f4a9ce1e437e5950793d3e1359d061a330326fd1d72fa5eb049060ba8 WatchSource:0}: Error finding container 6c28a17f4a9ce1e437e5950793d3e1359d061a330326fd1d72fa5eb049060ba8: Status 404 returned error can't find the container with id 6c28a17f4a9ce1e437e5950793d3e1359d061a330326fd1d72fa5eb049060ba8 Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.144772 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.360700 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlpwd" event={"ID":"298bab26-0793-4998-a6da-b4df8db6ee59","Type":"ContainerStarted","Data":"6c28a17f4a9ce1e437e5950793d3e1359d061a330326fd1d72fa5eb049060ba8"} Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.381229 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.385583 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-w2qjk" event={"ID":"6cc353f1-a71d-4983-b48f-81ac6140952b","Type":"ContainerDied","Data":"89c82783c1732e803c4c457b73ab08a80843040c11fe189ababf774e0897e2c9"} Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.387235 4863 scope.go:117] "RemoveContainer" containerID="95223dcea418612ecb5a3e3dcc7de79b6b1662ef56c11682aa7a38808c146929" Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.431961 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cfznd" event={"ID":"63784fd1-baa1-4334-96cf-b1467c661030","Type":"ContainerStarted","Data":"f25acbcdd0eb63080dbe931f5c4c38e2eb4c0c55d53e57c13682265c95473515"} Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.439064 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-4xpt9" Dec 05 07:08:22 crc kubenswrapper[4863]: E1205 07:08:22.447452 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:82006b9c64d4c5f80483cda262d960ce6be4813665158ef1a53ea7734bbe431f\\\"\"" pod="openstack/barbican-db-sync-vcb5r" podUID="9723e99f-fe23-4e42-a037-c31bf410036a" Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.447706 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4xpt9" event={"ID":"a8b5f081-158a-4d6c-a16f-c1b90548ee63","Type":"ContainerDied","Data":"46e066e29ca0d896af2755a661a4be31462526c6afcf4b8c6a60e112f8dba5fd"} Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.448502 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46e066e29ca0d896af2755a661a4be31462526c6afcf4b8c6a60e112f8dba5fd" Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.483941 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-cfznd" podStartSLOduration=3.265638679 podStartE2EDuration="25.483918573s" podCreationTimestamp="2025-12-05 07:07:57 +0000 UTC" firstStartedPulling="2025-12-05 07:07:59.22691836 +0000 UTC m=+1306.952915400" lastFinishedPulling="2025-12-05 07:08:21.445198254 +0000 UTC m=+1329.171195294" observedRunningTime="2025-12-05 07:08:22.453910001 +0000 UTC m=+1330.179907041" watchObservedRunningTime="2025-12-05 07:08:22.483918573 +0000 UTC m=+1330.209915613" Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.486976 4863 scope.go:117] "RemoveContainer" containerID="a6b2b2cb296137cef37090d5a05db684b05bb2465c1155b26b4b695d0b2f239b" Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.504819 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-w2qjk"] Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.510490 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-w2qjk"] Dec 05 07:08:22 crc kubenswrapper[4863]: I1205 07:08:22.614010 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" path="/var/lib/kubelet/pods/6cc353f1-a71d-4983-b48f-81ac6140952b/volumes" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.013973 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7884648fd9-nzwr4"] Dec 05 07:08:23 crc kubenswrapper[4863]: E1205 07:08:23.014396 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="dnsmasq-dns" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.014418 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="dnsmasq-dns" Dec 05 07:08:23 crc kubenswrapper[4863]: E1205 07:08:23.014440 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="init" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.014448 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="init" Dec 05 07:08:23 crc kubenswrapper[4863]: E1205 07:08:23.014467 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8b5f081-158a-4d6c-a16f-c1b90548ee63" containerName="glance-db-sync" Dec 05 
07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.014477 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8b5f081-158a-4d6c-a16f-c1b90548ee63" containerName="glance-db-sync" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.014706 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8b5f081-158a-4d6c-a16f-c1b90548ee63" containerName="glance-db-sync" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.014741 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cc353f1-a71d-4983-b48f-81ac6140952b" containerName="dnsmasq-dns" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.015801 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.051540 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7884648fd9-nzwr4"] Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.085205 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-swift-storage-0\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.085268 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-svc\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.085338 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-sb\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.085374 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdtqq\" (UniqueName: \"kubernetes.io/projected/bf850941-2947-4411-883e-a801c84f51b5-kube-api-access-qdtqq\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.085411 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-nb\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.085439 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-config\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.186941 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" 
(UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-swift-storage-0\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.187006 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-svc\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.187087 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-sb\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.187130 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdtqq\" (UniqueName: \"kubernetes.io/projected/bf850941-2947-4411-883e-a801c84f51b5-kube-api-access-qdtqq\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.187174 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-nb\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.187205 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-config\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.188162 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-sb\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.189167 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-config\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.189229 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-swift-storage-0\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.190288 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-svc\") pod 
\"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.192776 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-nb\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.221343 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdtqq\" (UniqueName: \"kubernetes.io/projected/bf850941-2947-4411-883e-a801c84f51b5-kube-api-access-qdtqq\") pod \"dnsmasq-dns-7884648fd9-nzwr4\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.359531 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.455725 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlpwd" event={"ID":"298bab26-0793-4998-a6da-b4df8db6ee59","Type":"ContainerStarted","Data":"287b7156913a5e1577175c3d7d4cdc1cb429b1b78e1d70d44014f06595459e29"} Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.462102 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerStarted","Data":"844cf3427eeefeacddefcfd694a14ce540876816a3b127aa7962e280a38268d9"} Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.482945 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dlpwd" podStartSLOduration=12.482923106 podStartE2EDuration="12.482923106s" podCreationTimestamp="2025-12-05 07:08:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:23.476133653 +0000 UTC m=+1331.202130703" watchObservedRunningTime="2025-12-05 07:08:23.482923106 +0000 UTC m=+1331.208920146" Dec 05 07:08:23 crc kubenswrapper[4863]: I1205 07:08:23.943855 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7884648fd9-nzwr4"] Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.068626 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.069968 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.072072 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6j46f" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.077028 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.077176 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.103431 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.163551 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.165524 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.167524 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.168558 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.216052 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.216115 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.216135 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q869\" (UniqueName: \"kubernetes.io/projected/717f7bda-761c-472e-a4d2-156520ec7e1a-kube-api-access-4q869\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.216199 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.216248 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-logs\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.216281 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-config-data\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.216296 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-scripts\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.317703 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318076 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318108 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q869\" (UniqueName: \"kubernetes.io/projected/717f7bda-761c-472e-a4d2-156520ec7e1a-kube-api-access-4q869\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318140 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318176 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-logs\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318210 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318253 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318283 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w4f4\" (UniqueName: \"kubernetes.io/projected/23aa90b3-db69-4d96-b9ed-b2cafe93839e-kube-api-access-4w4f4\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318324 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318347 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318374 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-logs\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318420 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-config-data\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318439 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-scripts\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318508 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.318913 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.320104 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-logs\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.320813 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.323606 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-scripts\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.326305 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.333741 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-config-data\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.336935 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q869\" (UniqueName: \"kubernetes.io/projected/717f7bda-761c-472e-a4d2-156520ec7e1a-kube-api-access-4q869\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.345140 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.420655 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.420728 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-logs\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.420771 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.420824 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w4f4\" (UniqueName: \"kubernetes.io/projected/23aa90b3-db69-4d96-b9ed-b2cafe93839e-kube-api-access-4w4f4\") pod 
\"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.420870 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.420892 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.420977 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.423709 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.423963 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.424861 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-logs\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.426315 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.427460 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.432221 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 
crc kubenswrapper[4863]: I1205 07:08:24.453844 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w4f4\" (UniqueName: \"kubernetes.io/projected/23aa90b3-db69-4d96-b9ed-b2cafe93839e-kube-api-access-4w4f4\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.456290 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.477441 4863 generic.go:334] "Generic (PLEG): container finished" podID="63784fd1-baa1-4334-96cf-b1467c661030" containerID="f25acbcdd0eb63080dbe931f5c4c38e2eb4c0c55d53e57c13682265c95473515" exitCode=0 Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.477544 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cfznd" event={"ID":"63784fd1-baa1-4334-96cf-b1467c661030","Type":"ContainerDied","Data":"f25acbcdd0eb63080dbe931f5c4c38e2eb4c0c55d53e57c13682265c95473515"} Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.479853 4863 generic.go:334] "Generic (PLEG): container finished" podID="853fd865-612f-4875-8c38-c6d67e486c0e" containerID="867f7cf9e88b1eb78b8dadfb5286d67bf858706a6a5792e5b1e2b0cc60117820" exitCode=0 Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.479935 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-7xsrt" event={"ID":"853fd865-612f-4875-8c38-c6d67e486c0e","Type":"ContainerDied","Data":"867f7cf9e88b1eb78b8dadfb5286d67bf858706a6a5792e5b1e2b0cc60117820"} Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.481411 4863 generic.go:334] "Generic (PLEG): container finished" podID="bf850941-2947-4411-883e-a801c84f51b5" containerID="4e60244d96a9e351eb7686b1782a4677a2fda6eed63e22a85befcb71ffbf734b" exitCode=0 Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.482961 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" event={"ID":"bf850941-2947-4411-883e-a801c84f51b5","Type":"ContainerDied","Data":"4e60244d96a9e351eb7686b1782a4677a2fda6eed63e22a85befcb71ffbf734b"} Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.483019 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" event={"ID":"bf850941-2947-4411-883e-a801c84f51b5","Type":"ContainerStarted","Data":"58f716aa581bc1a7df31c98a9ab08b3eee353db5afaaed54e1510749e2f7987c"} Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.526019 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:08:24 crc kubenswrapper[4863]: I1205 07:08:24.546601 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.263675 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:25 crc kubenswrapper[4863]: W1205 07:08:25.267325 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod717f7bda_761c_472e_a4d2_156520ec7e1a.slice/crio-36fdcb1e35126582121ed2b608ebf914f7568332b48c0f43d6e962c6bdca4501 WatchSource:0}: Error finding container 36fdcb1e35126582121ed2b608ebf914f7568332b48c0f43d6e962c6bdca4501: Status 404 returned error can't find the container with id 36fdcb1e35126582121ed2b608ebf914f7568332b48c0f43d6e962c6bdca4501 Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.493650 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" event={"ID":"bf850941-2947-4411-883e-a801c84f51b5","Type":"ContainerStarted","Data":"4a1aee0554970e7341160fff7309863cdccf33a6c2aebff70276362b4a903d99"} Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.493864 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.495042 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"717f7bda-761c-472e-a4d2-156520ec7e1a","Type":"ContainerStarted","Data":"36fdcb1e35126582121ed2b608ebf914f7568332b48c0f43d6e962c6bdca4501"} Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.528997 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" podStartSLOduration=3.528973438 podStartE2EDuration="3.528973438s" podCreationTimestamp="2025-12-05 07:08:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:25.522625526 +0000 UTC m=+1333.248622586" watchObservedRunningTime="2025-12-05 07:08:25.528973438 +0000 UTC m=+1333.254970478" Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.826115 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.904756 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:25 crc kubenswrapper[4863]: I1205 07:08:25.964268 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.065222 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-config\") pod \"853fd865-612f-4875-8c38-c6d67e486c0e\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.065395 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn7gf\" (UniqueName: \"kubernetes.io/projected/853fd865-612f-4875-8c38-c6d67e486c0e-kube-api-access-sn7gf\") pod \"853fd865-612f-4875-8c38-c6d67e486c0e\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.065465 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-combined-ca-bundle\") pod \"853fd865-612f-4875-8c38-c6d67e486c0e\" (UID: \"853fd865-612f-4875-8c38-c6d67e486c0e\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.074337 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/853fd865-612f-4875-8c38-c6d67e486c0e-kube-api-access-sn7gf" (OuterVolumeSpecName: "kube-api-access-sn7gf") pod "853fd865-612f-4875-8c38-c6d67e486c0e" (UID: "853fd865-612f-4875-8c38-c6d67e486c0e"). InnerVolumeSpecName "kube-api-access-sn7gf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.124245 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-config" (OuterVolumeSpecName: "config") pod "853fd865-612f-4875-8c38-c6d67e486c0e" (UID: "853fd865-612f-4875-8c38-c6d67e486c0e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.126576 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "853fd865-612f-4875-8c38-c6d67e486c0e" (UID: "853fd865-612f-4875-8c38-c6d67e486c0e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.155951 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.167627 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.167665 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn7gf\" (UniqueName: \"kubernetes.io/projected/853fd865-612f-4875-8c38-c6d67e486c0e-kube-api-access-sn7gf\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.167681 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/853fd865-612f-4875-8c38-c6d67e486c0e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: W1205 07:08:26.236609 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23aa90b3_db69_4d96_b9ed_b2cafe93839e.slice/crio-8536a0ee85a111293a4e4f7149a8780467fc58050aaa384d96266693426b9237 WatchSource:0}: Error finding container 8536a0ee85a111293a4e4f7149a8780467fc58050aaa384d96266693426b9237: Status 404 returned error can't find the container with id 8536a0ee85a111293a4e4f7149a8780467fc58050aaa384d96266693426b9237 Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.294122 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cfznd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.370139 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-scripts\") pod \"63784fd1-baa1-4334-96cf-b1467c661030\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.370262 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63784fd1-baa1-4334-96cf-b1467c661030-logs\") pod \"63784fd1-baa1-4334-96cf-b1467c661030\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.370678 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63784fd1-baa1-4334-96cf-b1467c661030-logs" (OuterVolumeSpecName: "logs") pod "63784fd1-baa1-4334-96cf-b1467c661030" (UID: "63784fd1-baa1-4334-96cf-b1467c661030"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.370758 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s75ws\" (UniqueName: \"kubernetes.io/projected/63784fd1-baa1-4334-96cf-b1467c661030-kube-api-access-s75ws\") pod \"63784fd1-baa1-4334-96cf-b1467c661030\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.371195 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-config-data\") pod \"63784fd1-baa1-4334-96cf-b1467c661030\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.371232 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-combined-ca-bundle\") pod \"63784fd1-baa1-4334-96cf-b1467c661030\" (UID: \"63784fd1-baa1-4334-96cf-b1467c661030\") " Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.371886 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63784fd1-baa1-4334-96cf-b1467c661030-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.374295 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63784fd1-baa1-4334-96cf-b1467c661030-kube-api-access-s75ws" (OuterVolumeSpecName: "kube-api-access-s75ws") pod "63784fd1-baa1-4334-96cf-b1467c661030" (UID: "63784fd1-baa1-4334-96cf-b1467c661030"). InnerVolumeSpecName "kube-api-access-s75ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.377111 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-scripts" (OuterVolumeSpecName: "scripts") pod "63784fd1-baa1-4334-96cf-b1467c661030" (UID: "63784fd1-baa1-4334-96cf-b1467c661030"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.404034 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-config-data" (OuterVolumeSpecName: "config-data") pod "63784fd1-baa1-4334-96cf-b1467c661030" (UID: "63784fd1-baa1-4334-96cf-b1467c661030"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.407937 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63784fd1-baa1-4334-96cf-b1467c661030" (UID: "63784fd1-baa1-4334-96cf-b1467c661030"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.474049 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.474086 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.474101 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63784fd1-baa1-4334-96cf-b1467c661030-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.474112 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s75ws\" (UniqueName: \"kubernetes.io/projected/63784fd1-baa1-4334-96cf-b1467c661030-kube-api-access-s75ws\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.509193 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-7xsrt" event={"ID":"853fd865-612f-4875-8c38-c6d67e486c0e","Type":"ContainerDied","Data":"41da0698cc1bd1a426bf38b1e42ef58767415dcde44bb75396fc357f0f9c72cd"} Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.509243 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41da0698cc1bd1a426bf38b1e42ef58767415dcde44bb75396fc357f0f9c72cd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.509336 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-7xsrt" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.524925 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23aa90b3-db69-4d96-b9ed-b2cafe93839e","Type":"ContainerStarted","Data":"8536a0ee85a111293a4e4f7149a8780467fc58050aaa384d96266693426b9237"} Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.538652 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cfznd" event={"ID":"63784fd1-baa1-4334-96cf-b1467c661030","Type":"ContainerDied","Data":"0db8a8e019d2b274a2413bbca7c29b0c0db1394c455132d2e1d5600384621697"} Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.538691 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0db8a8e019d2b274a2413bbca7c29b0c0db1394c455132d2e1d5600384621697" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.538719 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-cfznd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.544440 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"717f7bda-761c-472e-a4d2-156520ec7e1a","Type":"ContainerStarted","Data":"24ad6b2f37f6545ca44b1d4fd604e39965aa73123b4e59e20fca48935ab67d94"} Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.548722 4863 generic.go:334] "Generic (PLEG): container finished" podID="298bab26-0793-4998-a6da-b4df8db6ee59" containerID="287b7156913a5e1577175c3d7d4cdc1cb429b1b78e1d70d44014f06595459e29" exitCode=0 Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.548806 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlpwd" event={"ID":"298bab26-0793-4998-a6da-b4df8db6ee59","Type":"ContainerDied","Data":"287b7156913a5e1577175c3d7d4cdc1cb429b1b78e1d70d44014f06595459e29"} Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.710367 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-54d5bbb8cd-6z8nd"] Dec 05 07:08:26 crc kubenswrapper[4863]: E1205 07:08:26.710925 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="853fd865-612f-4875-8c38-c6d67e486c0e" containerName="neutron-db-sync" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.710951 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="853fd865-612f-4875-8c38-c6d67e486c0e" containerName="neutron-db-sync" Dec 05 07:08:26 crc kubenswrapper[4863]: E1205 07:08:26.710976 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63784fd1-baa1-4334-96cf-b1467c661030" containerName="placement-db-sync" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.710985 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="63784fd1-baa1-4334-96cf-b1467c661030" containerName="placement-db-sync" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.711239 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="853fd865-612f-4875-8c38-c6d67e486c0e" containerName="neutron-db-sync" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.711277 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="63784fd1-baa1-4334-96cf-b1467c661030" containerName="placement-db-sync" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.712595 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.715860 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.716600 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.716817 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.717047 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.717199 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fht5c" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.724960 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-54d5bbb8cd-6z8nd"] Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.753152 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7884648fd9-nzwr4"] Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.779690 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/046f9d47-6b50-473f-838f-8375b6fe6389-logs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.779754 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-internal-tls-certs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.779842 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-public-tls-certs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.779874 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckhg5\" (UniqueName: \"kubernetes.io/projected/046f9d47-6b50-473f-838f-8375b6fe6389-kube-api-access-ckhg5\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.779931 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-scripts\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.779958 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-config-data\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: 
\"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.779976 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-combined-ca-bundle\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.811068 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77f55878d5-gbzp7"] Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.813238 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.825770 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77f55878d5-gbzp7"] Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.903025 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-sb\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.903079 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-public-tls-certs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.903139 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckhg5\" (UniqueName: \"kubernetes.io/projected/046f9d47-6b50-473f-838f-8375b6fe6389-kube-api-access-ckhg5\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.903163 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-swift-storage-0\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-nb\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904752 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-scripts\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904797 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-config-data\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904830 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-combined-ca-bundle\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904897 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4nzx\" (UniqueName: \"kubernetes.io/projected/715aee05-cbc6-41e1-b7c7-247170eaca7b-kube-api-access-n4nzx\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904920 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/046f9d47-6b50-473f-838f-8375b6fe6389-logs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904949 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-internal-tls-certs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.904978 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-config\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.905019 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-svc\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.906345 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/046f9d47-6b50-473f-838f-8375b6fe6389-logs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.910543 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-public-tls-certs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.911068 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-scripts\") pod 
\"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.911197 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-internal-tls-certs\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.918285 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-config-data\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.918416 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-combined-ca-bundle\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:26 crc kubenswrapper[4863]: I1205 07:08:26.934229 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckhg5\" (UniqueName: \"kubernetes.io/projected/046f9d47-6b50-473f-838f-8375b6fe6389-kube-api-access-ckhg5\") pod \"placement-54d5bbb8cd-6z8nd\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.006429 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-sb\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.006522 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-swift-storage-0\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.006604 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-nb\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.006681 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4nzx\" (UniqueName: \"kubernetes.io/projected/715aee05-cbc6-41e1-b7c7-247170eaca7b-kube-api-access-n4nzx\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.006707 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-config\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " 
pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.006732 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-svc\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.007548 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-sb\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.007681 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-nb\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.007830 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-config\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.007828 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-svc\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.007909 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-swift-storage-0\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.027287 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4nzx\" (UniqueName: \"kubernetes.io/projected/715aee05-cbc6-41e1-b7c7-247170eaca7b-kube-api-access-n4nzx\") pod \"dnsmasq-dns-77f55878d5-gbzp7\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.043219 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.085627 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6c5ffb56f6-p58jf"] Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.087062 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.090900 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2vpsz" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.091243 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.091435 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.091677 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.098087 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c5ffb56f6-p58jf"] Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.132079 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.214431 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-combined-ca-bundle\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.214583 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-config\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.214606 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-ovndb-tls-certs\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.214720 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-httpd-config\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.214764 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4rfm\" (UniqueName: \"kubernetes.io/projected/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-kube-api-access-r4rfm\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.316069 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-config\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.316120 4863 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-ovndb-tls-certs\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.316170 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-httpd-config\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.316188 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4rfm\" (UniqueName: \"kubernetes.io/projected/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-kube-api-access-r4rfm\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.316242 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-combined-ca-bundle\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.320842 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-httpd-config\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.321096 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-ovndb-tls-certs\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.323284 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-config\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.330266 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-combined-ca-bundle\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.337094 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4rfm\" (UniqueName: \"kubernetes.io/projected/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-kube-api-access-r4rfm\") pod \"neutron-6c5ffb56f6-p58jf\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.417424 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:27 crc kubenswrapper[4863]: I1205 07:08:27.579174 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" podUID="bf850941-2947-4411-883e-a801c84f51b5" containerName="dnsmasq-dns" containerID="cri-o://4a1aee0554970e7341160fff7309863cdccf33a6c2aebff70276362b4a903d99" gracePeriod=10 Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.587268 4863 generic.go:334] "Generic (PLEG): container finished" podID="bf850941-2947-4411-883e-a801c84f51b5" containerID="4a1aee0554970e7341160fff7309863cdccf33a6c2aebff70276362b4a903d99" exitCode=0 Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.588710 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" event={"ID":"bf850941-2947-4411-883e-a801c84f51b5","Type":"ContainerDied","Data":"4a1aee0554970e7341160fff7309863cdccf33a6c2aebff70276362b4a903d99"} Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.737911 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.839344 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-config-data\") pod \"298bab26-0793-4998-a6da-b4df8db6ee59\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.839400 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xcqb\" (UniqueName: \"kubernetes.io/projected/298bab26-0793-4998-a6da-b4df8db6ee59-kube-api-access-4xcqb\") pod \"298bab26-0793-4998-a6da-b4df8db6ee59\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.839466 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-scripts\") pod \"298bab26-0793-4998-a6da-b4df8db6ee59\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.839541 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-combined-ca-bundle\") pod \"298bab26-0793-4998-a6da-b4df8db6ee59\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.839580 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-fernet-keys\") pod \"298bab26-0793-4998-a6da-b4df8db6ee59\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.839635 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-credential-keys\") pod \"298bab26-0793-4998-a6da-b4df8db6ee59\" (UID: \"298bab26-0793-4998-a6da-b4df8db6ee59\") " Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.850283 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/298bab26-0793-4998-a6da-b4df8db6ee59-kube-api-access-4xcqb" (OuterVolumeSpecName: 
"kube-api-access-4xcqb") pod "298bab26-0793-4998-a6da-b4df8db6ee59" (UID: "298bab26-0793-4998-a6da-b4df8db6ee59"). InnerVolumeSpecName "kube-api-access-4xcqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.851588 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "298bab26-0793-4998-a6da-b4df8db6ee59" (UID: "298bab26-0793-4998-a6da-b4df8db6ee59"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.864509 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-scripts" (OuterVolumeSpecName: "scripts") pod "298bab26-0793-4998-a6da-b4df8db6ee59" (UID: "298bab26-0793-4998-a6da-b4df8db6ee59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.867582 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "298bab26-0793-4998-a6da-b4df8db6ee59" (UID: "298bab26-0793-4998-a6da-b4df8db6ee59"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.874855 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-config-data" (OuterVolumeSpecName: "config-data") pod "298bab26-0793-4998-a6da-b4df8db6ee59" (UID: "298bab26-0793-4998-a6da-b4df8db6ee59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.886916 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "298bab26-0793-4998-a6da-b4df8db6ee59" (UID: "298bab26-0793-4998-a6da-b4df8db6ee59"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.941395 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xcqb\" (UniqueName: \"kubernetes.io/projected/298bab26-0793-4998-a6da-b4df8db6ee59-kube-api-access-4xcqb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.941432 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.941447 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.941459 4863 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.941472 4863 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:28 crc kubenswrapper[4863]: I1205 07:08:28.941526 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/298bab26-0793-4998-a6da-b4df8db6ee59-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.396200 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-659b8866f7-wfh8q"] Dec 05 07:08:29 crc kubenswrapper[4863]: E1205 07:08:29.397026 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="298bab26-0793-4998-a6da-b4df8db6ee59" containerName="keystone-bootstrap" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.397052 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="298bab26-0793-4998-a6da-b4df8db6ee59" containerName="keystone-bootstrap" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.397274 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="298bab26-0793-4998-a6da-b4df8db6ee59" containerName="keystone-bootstrap" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.398228 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.401039 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.401242 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.407604 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-659b8866f7-wfh8q"] Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.449394 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-combined-ca-bundle\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.449494 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnnb7\" (UniqueName: \"kubernetes.io/projected/fcb2529a-46f2-4b17-bb95-8ef2a119f222-kube-api-access-mnnb7\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.449538 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-internal-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.449619 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-public-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.449663 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-httpd-config\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.449698 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-config\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.449734 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-ovndb-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.551753 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-public-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.551866 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-httpd-config\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.551938 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-config\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.552027 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-ovndb-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.552104 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-combined-ca-bundle\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.552190 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnnb7\" (UniqueName: \"kubernetes.io/projected/fcb2529a-46f2-4b17-bb95-8ef2a119f222-kube-api-access-mnnb7\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.552245 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-internal-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.556855 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-config\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.557340 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-public-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.556763 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-internal-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " 
pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.561406 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-httpd-config\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.562650 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-combined-ca-bundle\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.564278 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-ovndb-tls-certs\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.578658 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnnb7\" (UniqueName: \"kubernetes.io/projected/fcb2529a-46f2-4b17-bb95-8ef2a119f222-kube-api-access-mnnb7\") pod \"neutron-659b8866f7-wfh8q\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.598884 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlpwd" event={"ID":"298bab26-0793-4998-a6da-b4df8db6ee59","Type":"ContainerDied","Data":"6c28a17f4a9ce1e437e5950793d3e1359d061a330326fd1d72fa5eb049060ba8"} Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.598924 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c28a17f4a9ce1e437e5950793d3e1359d061a330326fd1d72fa5eb049060ba8" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.598983 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dlpwd" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.713574 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.831598 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-558b46f87f-4r8fh"] Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.835460 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.838240 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.838778 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.838822 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sbn4r" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.838920 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.839649 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.839898 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.851496 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-558b46f87f-4r8fh"] Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.959756 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-scripts\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.960179 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-public-tls-certs\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.960274 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-combined-ca-bundle\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.960336 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-config-data\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.960490 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-internal-tls-certs\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.960602 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5xq6\" (UniqueName: \"kubernetes.io/projected/26d1df4f-5673-4b66-ad39-6da15197ef72-kube-api-access-d5xq6\") pod \"keystone-558b46f87f-4r8fh\" (UID: 
\"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.960655 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-credential-keys\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:29 crc kubenswrapper[4863]: I1205 07:08:29.960753 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-fernet-keys\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.062797 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-public-tls-certs\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.062859 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-combined-ca-bundle\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.062888 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-config-data\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.062928 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-internal-tls-certs\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.062979 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5xq6\" (UniqueName: \"kubernetes.io/projected/26d1df4f-5673-4b66-ad39-6da15197ef72-kube-api-access-d5xq6\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.063007 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-credential-keys\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.063059 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-fernet-keys\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " 
pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.063104 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-scripts\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.073436 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-internal-tls-certs\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.073520 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-scripts\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.074051 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-combined-ca-bundle\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.074109 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-fernet-keys\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.074123 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-config-data\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.074200 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-credential-keys\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.075688 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-public-tls-certs\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.091152 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5xq6\" (UniqueName: \"kubernetes.io/projected/26d1df4f-5673-4b66-ad39-6da15197ef72-kube-api-access-d5xq6\") pod \"keystone-558b46f87f-4r8fh\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.162438 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.615633 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-log" containerID="cri-o://24ad6b2f37f6545ca44b1d4fd604e39965aa73123b4e59e20fca48935ab67d94" gracePeriod=30 Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.615869 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-httpd" containerID="cri-o://7a77de7348198c887e13bd3b01a68457cb45d6ef03dcc8c8c36036976456700b" gracePeriod=30 Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.616364 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"717f7bda-761c-472e-a4d2-156520ec7e1a","Type":"ContainerStarted","Data":"7a77de7348198c887e13bd3b01a68457cb45d6ef03dcc8c8c36036976456700b"} Dec 05 07:08:30 crc kubenswrapper[4863]: I1205 07:08:30.648429 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.648407537 podStartE2EDuration="7.648407537s" podCreationTimestamp="2025-12-05 07:08:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:30.641589753 +0000 UTC m=+1338.367586803" watchObservedRunningTime="2025-12-05 07:08:30.648407537 +0000 UTC m=+1338.374404587" Dec 05 07:08:31 crc kubenswrapper[4863]: I1205 07:08:31.625896 4863 generic.go:334] "Generic (PLEG): container finished" podID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerID="24ad6b2f37f6545ca44b1d4fd604e39965aa73123b4e59e20fca48935ab67d94" exitCode=143 Dec 05 07:08:31 crc kubenswrapper[4863]: I1205 07:08:31.625990 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"717f7bda-761c-472e-a4d2-156520ec7e1a","Type":"ContainerDied","Data":"24ad6b2f37f6545ca44b1d4fd604e39965aa73123b4e59e20fca48935ab67d94"} Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.647865 4863 generic.go:334] "Generic (PLEG): container finished" podID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerID="7a77de7348198c887e13bd3b01a68457cb45d6ef03dcc8c8c36036976456700b" exitCode=0 Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.648181 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"717f7bda-761c-472e-a4d2-156520ec7e1a","Type":"ContainerDied","Data":"7a77de7348198c887e13bd3b01a68457cb45d6ef03dcc8c8c36036976456700b"} Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.895392 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.932246 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-sb\") pod \"bf850941-2947-4411-883e-a801c84f51b5\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.932319 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdtqq\" (UniqueName: \"kubernetes.io/projected/bf850941-2947-4411-883e-a801c84f51b5-kube-api-access-qdtqq\") pod \"bf850941-2947-4411-883e-a801c84f51b5\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.932415 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-config\") pod \"bf850941-2947-4411-883e-a801c84f51b5\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.932564 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-swift-storage-0\") pod \"bf850941-2947-4411-883e-a801c84f51b5\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.932604 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-nb\") pod \"bf850941-2947-4411-883e-a801c84f51b5\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.932651 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-svc\") pod \"bf850941-2947-4411-883e-a801c84f51b5\" (UID: \"bf850941-2947-4411-883e-a801c84f51b5\") " Dec 05 07:08:33 crc kubenswrapper[4863]: I1205 07:08:33.947289 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf850941-2947-4411-883e-a801c84f51b5-kube-api-access-qdtqq" (OuterVolumeSpecName: "kube-api-access-qdtqq") pod "bf850941-2947-4411-883e-a801c84f51b5" (UID: "bf850941-2947-4411-883e-a801c84f51b5"). InnerVolumeSpecName "kube-api-access-qdtqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.001983 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-config" (OuterVolumeSpecName: "config") pod "bf850941-2947-4411-883e-a801c84f51b5" (UID: "bf850941-2947-4411-883e-a801c84f51b5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.006598 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bf850941-2947-4411-883e-a801c84f51b5" (UID: "bf850941-2947-4411-883e-a801c84f51b5"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.036805 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.036830 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdtqq\" (UniqueName: \"kubernetes.io/projected/bf850941-2947-4411-883e-a801c84f51b5-kube-api-access-qdtqq\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.036841 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.063695 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bf850941-2947-4411-883e-a801c84f51b5" (UID: "bf850941-2947-4411-883e-a801c84f51b5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.067796 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bf850941-2947-4411-883e-a801c84f51b5" (UID: "bf850941-2947-4411-883e-a801c84f51b5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.099388 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-54d5bbb8cd-6z8nd"] Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.115435 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bf850941-2947-4411-883e-a801c84f51b5" (UID: "bf850941-2947-4411-883e-a801c84f51b5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.138747 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.138845 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.138901 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf850941-2947-4411-883e-a801c84f51b5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.159521 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242196 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-config-data\") pod \"717f7bda-761c-472e-a4d2-156520ec7e1a\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242256 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-combined-ca-bundle\") pod \"717f7bda-761c-472e-a4d2-156520ec7e1a\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242328 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4q869\" (UniqueName: \"kubernetes.io/projected/717f7bda-761c-472e-a4d2-156520ec7e1a-kube-api-access-4q869\") pod \"717f7bda-761c-472e-a4d2-156520ec7e1a\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242382 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"717f7bda-761c-472e-a4d2-156520ec7e1a\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242411 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-httpd-run\") pod \"717f7bda-761c-472e-a4d2-156520ec7e1a\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242427 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-scripts\") pod \"717f7bda-761c-472e-a4d2-156520ec7e1a\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242453 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-logs\") pod \"717f7bda-761c-472e-a4d2-156520ec7e1a\" (UID: \"717f7bda-761c-472e-a4d2-156520ec7e1a\") " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.242998 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-logs" (OuterVolumeSpecName: "logs") pod "717f7bda-761c-472e-a4d2-156520ec7e1a" (UID: "717f7bda-761c-472e-a4d2-156520ec7e1a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.243146 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "717f7bda-761c-472e-a4d2-156520ec7e1a" (UID: "717f7bda-761c-472e-a4d2-156520ec7e1a"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.246830 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-scripts" (OuterVolumeSpecName: "scripts") pod "717f7bda-761c-472e-a4d2-156520ec7e1a" (UID: "717f7bda-761c-472e-a4d2-156520ec7e1a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.248758 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "717f7bda-761c-472e-a4d2-156520ec7e1a" (UID: "717f7bda-761c-472e-a4d2-156520ec7e1a"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.252717 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/717f7bda-761c-472e-a4d2-156520ec7e1a-kube-api-access-4q869" (OuterVolumeSpecName: "kube-api-access-4q869") pod "717f7bda-761c-472e-a4d2-156520ec7e1a" (UID: "717f7bda-761c-472e-a4d2-156520ec7e1a"). InnerVolumeSpecName "kube-api-access-4q869". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.270162 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "717f7bda-761c-472e-a4d2-156520ec7e1a" (UID: "717f7bda-761c-472e-a4d2-156520ec7e1a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.293235 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-config-data" (OuterVolumeSpecName: "config-data") pod "717f7bda-761c-472e-a4d2-156520ec7e1a" (UID: "717f7bda-761c-472e-a4d2-156520ec7e1a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.344312 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.344352 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.344361 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f7bda-761c-472e-a4d2-156520ec7e1a-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.344370 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.344380 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f7bda-761c-472e-a4d2-156520ec7e1a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.344390 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4q869\" (UniqueName: \"kubernetes.io/projected/717f7bda-761c-472e-a4d2-156520ec7e1a-kube-api-access-4q869\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.344423 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.359684 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77f55878d5-gbzp7"] Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.365566 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.370106 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-558b46f87f-4r8fh"] Dec 05 07:08:34 crc kubenswrapper[4863]: W1205 07:08:34.370814 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod715aee05_cbc6_41e1_b7c7_247170eaca7b.slice/crio-0d6e9fe2cf482ba9b6d846f2a9a7bb3446d411b5d9dde8efb29ccdc361366e65 WatchSource:0}: Error finding container 0d6e9fe2cf482ba9b6d846f2a9a7bb3446d411b5d9dde8efb29ccdc361366e65: Status 404 returned error can't find the container with id 0d6e9fe2cf482ba9b6d846f2a9a7bb3446d411b5d9dde8efb29ccdc361366e65 Dec 05 07:08:34 crc kubenswrapper[4863]: W1205 07:08:34.381322 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26d1df4f_5673_4b66_ad39_6da15197ef72.slice/crio-747529b6cd19ad9be318c1eca48f2e228e5b26a298b6592e90c170426c5a4944 WatchSource:0}: Error finding container 747529b6cd19ad9be318c1eca48f2e228e5b26a298b6592e90c170426c5a4944: Status 404 returned error can't find the container with id 747529b6cd19ad9be318c1eca48f2e228e5b26a298b6592e90c170426c5a4944 Dec 05 07:08:34 crc kubenswrapper[4863]: 
I1205 07:08:34.447161 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.538739 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-659b8866f7-wfh8q"] Dec 05 07:08:34 crc kubenswrapper[4863]: W1205 07:08:34.543417 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcb2529a_46f2_4b17_bb95_8ef2a119f222.slice/crio-637fd1fd417d45f1092909a3ff9c5c60d4cb1467c19a4499dd3b3e1386a038b2 WatchSource:0}: Error finding container 637fd1fd417d45f1092909a3ff9c5c60d4cb1467c19a4499dd3b3e1386a038b2: Status 404 returned error can't find the container with id 637fd1fd417d45f1092909a3ff9c5c60d4cb1467c19a4499dd3b3e1386a038b2 Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.660623 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"717f7bda-761c-472e-a4d2-156520ec7e1a","Type":"ContainerDied","Data":"36fdcb1e35126582121ed2b608ebf914f7568332b48c0f43d6e962c6bdca4501"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.660674 4863 scope.go:117] "RemoveContainer" containerID="7a77de7348198c887e13bd3b01a68457cb45d6ef03dcc8c8c36036976456700b" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.661033 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.664930 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-659b8866f7-wfh8q" event={"ID":"fcb2529a-46f2-4b17-bb95-8ef2a119f222","Type":"ContainerStarted","Data":"637fd1fd417d45f1092909a3ff9c5c60d4cb1467c19a4499dd3b3e1386a038b2"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.666460 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-558b46f87f-4r8fh" event={"ID":"26d1df4f-5673-4b66-ad39-6da15197ef72","Type":"ContainerStarted","Data":"747529b6cd19ad9be318c1eca48f2e228e5b26a298b6592e90c170426c5a4944"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.668092 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-54d5bbb8cd-6z8nd" event={"ID":"046f9d47-6b50-473f-838f-8375b6fe6389","Type":"ContainerStarted","Data":"ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.668139 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-54d5bbb8cd-6z8nd" event={"ID":"046f9d47-6b50-473f-838f-8375b6fe6389","Type":"ContainerStarted","Data":"de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.668149 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-54d5bbb8cd-6z8nd" event={"ID":"046f9d47-6b50-473f-838f-8375b6fe6389","Type":"ContainerStarted","Data":"f90b9fe12eca7bbc176403ccf5bdcfcee30006ee7dd004d1993f14e7fd82d51e"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.668218 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.668243 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 
07:08:34.670731 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" event={"ID":"bf850941-2947-4411-883e-a801c84f51b5","Type":"ContainerDied","Data":"58f716aa581bc1a7df31c98a9ab08b3eee353db5afaaed54e1510749e2f7987c"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.670814 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.682565 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerStarted","Data":"93338d99ca2dbfaeeb0a02ac816791cf2d02894d08e76bb31829907cbcb73b4a"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.686368 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23aa90b3-db69-4d96-b9ed-b2cafe93839e","Type":"ContainerStarted","Data":"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.690772 4863 generic.go:334] "Generic (PLEG): container finished" podID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerID="7e06fab029b73a65dfce5fc537fb8b009dca4931a4b053758311296136352d7d" exitCode=0 Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.690822 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" event={"ID":"715aee05-cbc6-41e1-b7c7-247170eaca7b","Type":"ContainerDied","Data":"7e06fab029b73a65dfce5fc537fb8b009dca4931a4b053758311296136352d7d"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.690855 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" event={"ID":"715aee05-cbc6-41e1-b7c7-247170eaca7b","Type":"ContainerStarted","Data":"0d6e9fe2cf482ba9b6d846f2a9a7bb3446d411b5d9dde8efb29ccdc361366e65"} Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.700259 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-54d5bbb8cd-6z8nd" podStartSLOduration=8.700233702 podStartE2EDuration="8.700233702s" podCreationTimestamp="2025-12-05 07:08:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:34.690167249 +0000 UTC m=+1342.416164289" watchObservedRunningTime="2025-12-05 07:08:34.700233702 +0000 UTC m=+1342.426230742" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.857646 4863 scope.go:117] "RemoveContainer" containerID="24ad6b2f37f6545ca44b1d4fd604e39965aa73123b4e59e20fca48935ab67d94" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.928842 4863 scope.go:117] "RemoveContainer" containerID="4a1aee0554970e7341160fff7309863cdccf33a6c2aebff70276362b4a903d99" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.936377 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7884648fd9-nzwr4"] Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.948642 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7884648fd9-nzwr4"] Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.957380 4863 scope.go:117] "RemoveContainer" containerID="4e60244d96a9e351eb7686b1782a4677a2fda6eed63e22a85befcb71ffbf734b" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.960398 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 
05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.968493 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981281 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:34 crc kubenswrapper[4863]: E1205 07:08:34.981729 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-httpd" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981747 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-httpd" Dec 05 07:08:34 crc kubenswrapper[4863]: E1205 07:08:34.981759 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-log" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981765 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-log" Dec 05 07:08:34 crc kubenswrapper[4863]: E1205 07:08:34.981783 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf850941-2947-4411-883e-a801c84f51b5" containerName="init" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981789 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf850941-2947-4411-883e-a801c84f51b5" containerName="init" Dec 05 07:08:34 crc kubenswrapper[4863]: E1205 07:08:34.981800 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf850941-2947-4411-883e-a801c84f51b5" containerName="dnsmasq-dns" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981806 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf850941-2947-4411-883e-a801c84f51b5" containerName="dnsmasq-dns" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981965 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf850941-2947-4411-883e-a801c84f51b5" containerName="dnsmasq-dns" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981975 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-log" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.981991 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" containerName="glance-httpd" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.984181 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.986976 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 07:08:34 crc kubenswrapper[4863]: I1205 07:08:34.987102 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.017287 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.058792 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.058834 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.058884 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.058904 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clxtz\" (UniqueName: \"kubernetes.io/projected/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-kube-api-access-clxtz\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.058933 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-logs\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.058953 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.058977 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.059021 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162638 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162730 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162772 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162796 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162851 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162872 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clxtz\" (UniqueName: \"kubernetes.io/projected/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-kube-api-access-clxtz\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162911 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-logs\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.162941 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.163372 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.163706 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.164891 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-logs\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.181298 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.181518 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.181869 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.184824 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.187700 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clxtz\" (UniqueName: \"kubernetes.io/projected/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-kube-api-access-clxtz\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.617346 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.639070 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c5ffb56f6-p58jf"] Dec 05 07:08:35 crc kubenswrapper[4863]: W1205 07:08:35.643843 4863 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0314b5f_6374_4df1_9d19_7a87ff04b4ae.slice/crio-eb8079418802b7f18ac5474a676608ac414f79e562475c64d15fc7d05abb3609 WatchSource:0}: Error finding container eb8079418802b7f18ac5474a676608ac414f79e562475c64d15fc7d05abb3609: Status 404 returned error can't find the container with id eb8079418802b7f18ac5474a676608ac414f79e562475c64d15fc7d05abb3609 Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.691829 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.701397 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c5ffb56f6-p58jf" event={"ID":"c0314b5f-6374-4df1-9d19-7a87ff04b4ae","Type":"ContainerStarted","Data":"eb8079418802b7f18ac5474a676608ac414f79e562475c64d15fc7d05abb3609"} Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.706311 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" event={"ID":"715aee05-cbc6-41e1-b7c7-247170eaca7b","Type":"ContainerStarted","Data":"8ac9c137406b4443ee1a36b0528da792b08d9ad118d97635e400cf98f281489d"} Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.706401 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.714391 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6xkg4" event={"ID":"83a84e66-3bc2-4629-b251-40287f224f1b","Type":"ContainerStarted","Data":"7f0172ab698504d95c384d0d290700df45ad60c77f49b99f159ac68cb0831c65"} Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.733264 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-659b8866f7-wfh8q" event={"ID":"fcb2529a-46f2-4b17-bb95-8ef2a119f222","Type":"ContainerStarted","Data":"3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4"} Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.735499 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-558b46f87f-4r8fh" event={"ID":"26d1df4f-5673-4b66-ad39-6da15197ef72","Type":"ContainerStarted","Data":"954c1f5c6657fcb41a451ef64463595a73b0405b651fcdb2833bcc61d54b9090"} Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.737891 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" podStartSLOduration=9.737877824 podStartE2EDuration="9.737877824s" podCreationTimestamp="2025-12-05 07:08:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:35.732348172 +0000 UTC m=+1343.458345212" watchObservedRunningTime="2025-12-05 07:08:35.737877824 +0000 UTC m=+1343.463874864" Dec 05 07:08:35 crc kubenswrapper[4863]: I1205 07:08:35.749798 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-6xkg4" podStartSLOduration=3.8947918599999998 podStartE2EDuration="38.749781291s" podCreationTimestamp="2025-12-05 07:07:57 +0000 UTC" firstStartedPulling="2025-12-05 07:07:59.219826989 +0000 UTC m=+1306.945824029" lastFinishedPulling="2025-12-05 07:08:34.07481642 +0000 UTC m=+1341.800813460" observedRunningTime="2025-12-05 07:08:35.746059571 +0000 UTC m=+1343.472056611" watchObservedRunningTime="2025-12-05 07:08:35.749781291 +0000 UTC m=+1343.475778321" Dec 05 07:08:35 crc 
kubenswrapper[4863]: I1205 07:08:35.763623 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-558b46f87f-4r8fh" podStartSLOduration=6.763599314 podStartE2EDuration="6.763599314s" podCreationTimestamp="2025-12-05 07:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:35.75888297 +0000 UTC m=+1343.484880010" watchObservedRunningTime="2025-12-05 07:08:35.763599314 +0000 UTC m=+1343.489596354" Dec 05 07:08:36 crc kubenswrapper[4863]: I1205 07:08:36.291203 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:08:36 crc kubenswrapper[4863]: I1205 07:08:36.620954 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="717f7bda-761c-472e-a4d2-156520ec7e1a" path="/var/lib/kubelet/pods/717f7bda-761c-472e-a4d2-156520ec7e1a/volumes" Dec 05 07:08:36 crc kubenswrapper[4863]: I1205 07:08:36.622086 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf850941-2947-4411-883e-a801c84f51b5" path="/var/lib/kubelet/pods/bf850941-2947-4411-883e-a801c84f51b5/volumes" Dec 05 07:08:36 crc kubenswrapper[4863]: I1205 07:08:36.762207 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ff270182-7d2d-4b4c-b1a5-690b07bda3d2","Type":"ContainerStarted","Data":"0ac7f84dadec5f62f9a9a572fce63100e20a410a9b099bc6e708f0c94a75ef35"} Dec 05 07:08:36 crc kubenswrapper[4863]: I1205 07:08:36.764403 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23aa90b3-db69-4d96-b9ed-b2cafe93839e","Type":"ContainerStarted","Data":"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c"} Dec 05 07:08:36 crc kubenswrapper[4863]: I1205 07:08:36.767068 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-659b8866f7-wfh8q" event={"ID":"fcb2529a-46f2-4b17-bb95-8ef2a119f222","Type":"ContainerStarted","Data":"026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99"} Dec 05 07:08:36 crc kubenswrapper[4863]: I1205 07:08:36.767362 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.777620 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c5ffb56f6-p58jf" event={"ID":"c0314b5f-6374-4df1-9d19-7a87ff04b4ae","Type":"ContainerStarted","Data":"295bddc777143f899e2f1dfc97126a15fe79c5d73e7475fbeded1e556507917e"} Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.778619 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.778636 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c5ffb56f6-p58jf" event={"ID":"c0314b5f-6374-4df1-9d19-7a87ff04b4ae","Type":"ContainerStarted","Data":"03d180447008c516094a59dbf16ff7dc80e25060351520b84cf364cc365f5a32"} Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.783535 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-log" containerID="cri-o://a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec" gracePeriod=30 Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.784295 4863 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ff270182-7d2d-4b4c-b1a5-690b07bda3d2","Type":"ContainerStarted","Data":"085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9"} Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.784329 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ff270182-7d2d-4b4c-b1a5-690b07bda3d2","Type":"ContainerStarted","Data":"32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773"} Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.785111 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.785169 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-httpd" containerID="cri-o://f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c" gracePeriod=30 Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.805449 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6c5ffb56f6-p58jf" podStartSLOduration=10.805420774 podStartE2EDuration="10.805420774s" podCreationTimestamp="2025-12-05 07:08:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:37.801303125 +0000 UTC m=+1345.527300165" watchObservedRunningTime="2025-12-05 07:08:37.805420774 +0000 UTC m=+1345.531417814" Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.830658 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-659b8866f7-wfh8q" podStartSLOduration=8.83063028 podStartE2EDuration="8.83063028s" podCreationTimestamp="2025-12-05 07:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:37.822104665 +0000 UTC m=+1345.548101725" watchObservedRunningTime="2025-12-05 07:08:37.83063028 +0000 UTC m=+1345.556627320" Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.848104 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=14.848084581 podStartE2EDuration="14.848084581s" podCreationTimestamp="2025-12-05 07:08:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:37.846124853 +0000 UTC m=+1345.572121903" watchObservedRunningTime="2025-12-05 07:08:37.848084581 +0000 UTC m=+1345.574081621" Dec 05 07:08:37 crc kubenswrapper[4863]: I1205 07:08:37.878911 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.8788898830000003 podStartE2EDuration="3.878889883s" podCreationTimestamp="2025-12-05 07:08:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:37.869864725 +0000 UTC m=+1345.595861765" watchObservedRunningTime="2025-12-05 07:08:37.878889883 +0000 UTC m=+1345.604886923" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.354732 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.360941 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7884648fd9-nzwr4" podUID="bf850941-2947-4411-883e-a801c84f51b5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.143:5353: i/o timeout" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.464803 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.464918 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.523093 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.523217 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-scripts\") pod \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.523248 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-config-data\") pod \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.523315 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-logs\") pod \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.523414 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-combined-ca-bundle\") pod \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.523447 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-httpd-run\") pod \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.523507 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w4f4\" (UniqueName: \"kubernetes.io/projected/23aa90b3-db69-4d96-b9ed-b2cafe93839e-kube-api-access-4w4f4\") pod \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\" (UID: \"23aa90b3-db69-4d96-b9ed-b2cafe93839e\") " Dec 05 07:08:38 crc 
kubenswrapper[4863]: I1205 07:08:38.526337 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-logs" (OuterVolumeSpecName: "logs") pod "23aa90b3-db69-4d96-b9ed-b2cafe93839e" (UID: "23aa90b3-db69-4d96-b9ed-b2cafe93839e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.526607 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "23aa90b3-db69-4d96-b9ed-b2cafe93839e" (UID: "23aa90b3-db69-4d96-b9ed-b2cafe93839e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.529559 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23aa90b3-db69-4d96-b9ed-b2cafe93839e-kube-api-access-4w4f4" (OuterVolumeSpecName: "kube-api-access-4w4f4") pod "23aa90b3-db69-4d96-b9ed-b2cafe93839e" (UID: "23aa90b3-db69-4d96-b9ed-b2cafe93839e"). InnerVolumeSpecName "kube-api-access-4w4f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.529589 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-scripts" (OuterVolumeSpecName: "scripts") pod "23aa90b3-db69-4d96-b9ed-b2cafe93839e" (UID: "23aa90b3-db69-4d96-b9ed-b2cafe93839e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.531152 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "23aa90b3-db69-4d96-b9ed-b2cafe93839e" (UID: "23aa90b3-db69-4d96-b9ed-b2cafe93839e"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.559999 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23aa90b3-db69-4d96-b9ed-b2cafe93839e" (UID: "23aa90b3-db69-4d96-b9ed-b2cafe93839e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.571118 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-config-data" (OuterVolumeSpecName: "config-data") pod "23aa90b3-db69-4d96-b9ed-b2cafe93839e" (UID: "23aa90b3-db69-4d96-b9ed-b2cafe93839e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.625373 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.625420 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.625432 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.625440 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23aa90b3-db69-4d96-b9ed-b2cafe93839e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.625450 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23aa90b3-db69-4d96-b9ed-b2cafe93839e-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.625459 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w4f4\" (UniqueName: \"kubernetes.io/projected/23aa90b3-db69-4d96-b9ed-b2cafe93839e-kube-api-access-4w4f4\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.625819 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.644281 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.729609 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.795754 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vcb5r" event={"ID":"9723e99f-fe23-4e42-a037-c31bf410036a","Type":"ContainerStarted","Data":"642d7702d4942e791e57b7a86cd6a9203603d21c533f093681e960d05e24e97a"} Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.799854 4863 generic.go:334] "Generic (PLEG): container finished" podID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerID="f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c" exitCode=0 Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.799882 4863 generic.go:334] "Generic (PLEG): container finished" podID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerID="a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec" exitCode=143 Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.800663 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.801228 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23aa90b3-db69-4d96-b9ed-b2cafe93839e","Type":"ContainerDied","Data":"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c"} Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.801257 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23aa90b3-db69-4d96-b9ed-b2cafe93839e","Type":"ContainerDied","Data":"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec"} Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.801269 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23aa90b3-db69-4d96-b9ed-b2cafe93839e","Type":"ContainerDied","Data":"8536a0ee85a111293a4e4f7149a8780467fc58050aaa384d96266693426b9237"} Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.801286 4863 scope.go:117] "RemoveContainer" containerID="f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.818992 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-vcb5r" podStartSLOduration=2.987110735 podStartE2EDuration="41.818973677s" podCreationTimestamp="2025-12-05 07:07:57 +0000 UTC" firstStartedPulling="2025-12-05 07:07:59.226764116 +0000 UTC m=+1306.952761156" lastFinishedPulling="2025-12-05 07:08:38.058627058 +0000 UTC m=+1345.784624098" observedRunningTime="2025-12-05 07:08:38.816747953 +0000 UTC m=+1346.542745013" watchObservedRunningTime="2025-12-05 07:08:38.818973677 +0000 UTC m=+1346.544970717" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.837072 4863 scope.go:117] "RemoveContainer" containerID="a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.863838 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.878532 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.882389 4863 scope.go:117] "RemoveContainer" containerID="f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c" Dec 05 07:08:38 crc kubenswrapper[4863]: E1205 07:08:38.891907 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c\": container with ID starting with f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c not found: ID does not exist" containerID="f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.891957 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c"} err="failed to get container status \"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c\": rpc error: code = NotFound desc = could not find container \"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c\": container with ID starting with f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c not found: ID does not 
exist" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.891986 4863 scope.go:117] "RemoveContainer" containerID="a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec" Dec 05 07:08:38 crc kubenswrapper[4863]: E1205 07:08:38.894297 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec\": container with ID starting with a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec not found: ID does not exist" containerID="a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.894339 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec"} err="failed to get container status \"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec\": rpc error: code = NotFound desc = could not find container \"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec\": container with ID starting with a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec not found: ID does not exist" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.894367 4863 scope.go:117] "RemoveContainer" containerID="f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.895269 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c"} err="failed to get container status \"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c\": rpc error: code = NotFound desc = could not find container \"f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c\": container with ID starting with f8b589933d7b4b2e80f56acae6ee92388212b7589ecae98b1bb8ea12d701041c not found: ID does not exist" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.895318 4863 scope.go:117] "RemoveContainer" containerID="a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.896769 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec"} err="failed to get container status \"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec\": rpc error: code = NotFound desc = could not find container \"a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec\": container with ID starting with a3a3da4e93ac4efbe6f14bd6e3188920ef7120ee151f4e64b4272b717736caec not found: ID does not exist" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.904181 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:38 crc kubenswrapper[4863]: E1205 07:08:38.904703 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-httpd" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.904724 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-httpd" Dec 05 07:08:38 crc kubenswrapper[4863]: E1205 07:08:38.904769 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-log" Dec 05 
07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.904780 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-log" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.905011 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-log" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.905028 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" containerName="glance-httpd" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.906192 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.909020 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.912296 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 07:08:38 crc kubenswrapper[4863]: I1205 07:08:38.912595 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.034382 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-logs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.038040 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.038175 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.038231 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wbgs\" (UniqueName: \"kubernetes.io/projected/358077aa-1f8a-4496-8546-366bc758746d-kube-api-access-5wbgs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.038329 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.038390 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.038511 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.038558 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.142925 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.142986 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.143018 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.143127 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-logs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.143168 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.143220 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.143256 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wbgs\" (UniqueName: 
\"kubernetes.io/projected/358077aa-1f8a-4496-8546-366bc758746d-kube-api-access-5wbgs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.143293 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.144522 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.144881 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-logs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.145134 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.148799 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.155943 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.156718 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.169753 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wbgs\" (UniqueName: \"kubernetes.io/projected/358077aa-1f8a-4496-8546-366bc758746d-kube-api-access-5wbgs\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.172899 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.214853 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.268770 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:39 crc kubenswrapper[4863]: I1205 07:08:39.917551 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:08:40 crc kubenswrapper[4863]: I1205 07:08:40.612158 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23aa90b3-db69-4d96-b9ed-b2cafe93839e" path="/var/lib/kubelet/pods/23aa90b3-db69-4d96-b9ed-b2cafe93839e/volumes" Dec 05 07:08:42 crc kubenswrapper[4863]: I1205 07:08:42.134633 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:08:42 crc kubenswrapper[4863]: I1205 07:08:42.195456 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-794c6877f7-fqn6d"] Dec 05 07:08:42 crc kubenswrapper[4863]: I1205 07:08:42.195729 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerName="dnsmasq-dns" containerID="cri-o://e228beda85ec5d3d08fcbd997d0c278dbb12848353920c55e183080870ee1094" gracePeriod=10 Dec 05 07:08:42 crc kubenswrapper[4863]: E1205 07:08:42.768399 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83a84e66_3bc2_4629_b251_40287f224f1b.slice/crio-7f0172ab698504d95c384d0d290700df45ad60c77f49b99f159ac68cb0831c65.scope\": RecentStats: unable to find data in memory cache]" Dec 05 07:08:42 crc kubenswrapper[4863]: I1205 07:08:42.841516 4863 generic.go:334] "Generic (PLEG): container finished" podID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerID="e228beda85ec5d3d08fcbd997d0c278dbb12848353920c55e183080870ee1094" exitCode=0 Dec 05 07:08:42 crc kubenswrapper[4863]: I1205 07:08:42.841572 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" event={"ID":"b21d1baa-e6ea-41dd-b76d-1d5f175476d9","Type":"ContainerDied","Data":"e228beda85ec5d3d08fcbd997d0c278dbb12848353920c55e183080870ee1094"} Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.070168 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.141:5353: connect: connection refused" Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.852438 4863 generic.go:334] "Generic (PLEG): container finished" podID="83a84e66-3bc2-4629-b251-40287f224f1b" containerID="7f0172ab698504d95c384d0d290700df45ad60c77f49b99f159ac68cb0831c65" exitCode=0 Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.852520 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6xkg4" 
event={"ID":"83a84e66-3bc2-4629-b251-40287f224f1b","Type":"ContainerDied","Data":"7f0172ab698504d95c384d0d290700df45ad60c77f49b99f159ac68cb0831c65"} Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.855627 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" event={"ID":"b21d1baa-e6ea-41dd-b76d-1d5f175476d9","Type":"ContainerDied","Data":"1f1de6373fb51a97c79c2672c0881978b18fe780e98172fbb20ff4381a2bb923"} Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.855702 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f1de6373fb51a97c79c2672c0881978b18fe780e98172fbb20ff4381a2bb923" Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.857830 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"358077aa-1f8a-4496-8546-366bc758746d","Type":"ContainerStarted","Data":"8bb27eb8eeb64f7d69c177abcb7d920aae7bd8b4b3b2976f3f707263aa86d34c"} Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.859391 4863 generic.go:334] "Generic (PLEG): container finished" podID="9723e99f-fe23-4e42-a037-c31bf410036a" containerID="642d7702d4942e791e57b7a86cd6a9203603d21c533f093681e960d05e24e97a" exitCode=0 Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.859434 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vcb5r" event={"ID":"9723e99f-fe23-4e42-a037-c31bf410036a","Type":"ContainerDied","Data":"642d7702d4942e791e57b7a86cd6a9203603d21c533f093681e960d05e24e97a"} Dec 05 07:08:43 crc kubenswrapper[4863]: I1205 07:08:43.977977 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.136167 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kh9tz\" (UniqueName: \"kubernetes.io/projected/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-kube-api-access-kh9tz\") pod \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.136229 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-swift-storage-0\") pod \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.136271 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-svc\") pod \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.136396 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-config\") pod \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.136501 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-sb\") pod \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " Dec 05 07:08:44 crc kubenswrapper[4863]: 
I1205 07:08:44.136548 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-nb\") pod \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\" (UID: \"b21d1baa-e6ea-41dd-b76d-1d5f175476d9\") " Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.141947 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-kube-api-access-kh9tz" (OuterVolumeSpecName: "kube-api-access-kh9tz") pod "b21d1baa-e6ea-41dd-b76d-1d5f175476d9" (UID: "b21d1baa-e6ea-41dd-b76d-1d5f175476d9"). InnerVolumeSpecName "kube-api-access-kh9tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:44 crc kubenswrapper[4863]: E1205 07:08:44.142909 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.182494 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b21d1baa-e6ea-41dd-b76d-1d5f175476d9" (UID: "b21d1baa-e6ea-41dd-b76d-1d5f175476d9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.185110 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b21d1baa-e6ea-41dd-b76d-1d5f175476d9" (UID: "b21d1baa-e6ea-41dd-b76d-1d5f175476d9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.186111 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b21d1baa-e6ea-41dd-b76d-1d5f175476d9" (UID: "b21d1baa-e6ea-41dd-b76d-1d5f175476d9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.187052 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-config" (OuterVolumeSpecName: "config") pod "b21d1baa-e6ea-41dd-b76d-1d5f175476d9" (UID: "b21d1baa-e6ea-41dd-b76d-1d5f175476d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.188909 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b21d1baa-e6ea-41dd-b76d-1d5f175476d9" (UID: "b21d1baa-e6ea-41dd-b76d-1d5f175476d9"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.238618 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kh9tz\" (UniqueName: \"kubernetes.io/projected/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-kube-api-access-kh9tz\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.238664 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.238680 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.238691 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.238703 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.238714 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21d1baa-e6ea-41dd-b76d-1d5f175476d9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.871234 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"358077aa-1f8a-4496-8546-366bc758746d","Type":"ContainerStarted","Data":"f6e482cd6530ea79ed8f4591c10bb746bdc47ebe9575fc8d959a6297df953db2"} Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.871605 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"358077aa-1f8a-4496-8546-366bc758746d","Type":"ContainerStarted","Data":"d85bbeea8571b4f306cd156e8764d388f33336530169ed2a41e7f7469f038d9e"} Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.878687 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerStarted","Data":"815dfa16dedea85c7a9ab21068ce719e9954092d7086a6ce14313357086705fc"} Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.878881 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-794c6877f7-fqn6d" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.879167 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="ceilometer-notification-agent" containerID="cri-o://844cf3427eeefeacddefcfd694a14ce540876816a3b127aa7962e280a38268d9" gracePeriod=30 Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.879176 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="proxy-httpd" containerID="cri-o://815dfa16dedea85c7a9ab21068ce719e9954092d7086a6ce14313357086705fc" gracePeriod=30 Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.879193 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="sg-core" containerID="cri-o://93338d99ca2dbfaeeb0a02ac816791cf2d02894d08e76bb31829907cbcb73b4a" gracePeriod=30 Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.915952 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.915931022 podStartE2EDuration="6.915931022s" podCreationTimestamp="2025-12-05 07:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:44.899937307 +0000 UTC m=+1352.625934347" watchObservedRunningTime="2025-12-05 07:08:44.915931022 +0000 UTC m=+1352.641928082" Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.933581 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-794c6877f7-fqn6d"] Dec 05 07:08:44 crc kubenswrapper[4863]: I1205 07:08:44.944979 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-794c6877f7-fqn6d"] Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.264547 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.270080 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.355137 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-config-data\") pod \"83a84e66-3bc2-4629-b251-40287f224f1b\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.355217 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-scripts\") pod \"83a84e66-3bc2-4629-b251-40287f224f1b\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.355272 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpt7b\" (UniqueName: \"kubernetes.io/projected/9723e99f-fe23-4e42-a037-c31bf410036a-kube-api-access-vpt7b\") pod \"9723e99f-fe23-4e42-a037-c31bf410036a\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.355348 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzmwk\" (UniqueName: \"kubernetes.io/projected/83a84e66-3bc2-4629-b251-40287f224f1b-kube-api-access-dzmwk\") pod \"83a84e66-3bc2-4629-b251-40287f224f1b\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.356033 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-db-sync-config-data\") pod \"83a84e66-3bc2-4629-b251-40287f224f1b\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.356083 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-combined-ca-bundle\") pod \"9723e99f-fe23-4e42-a037-c31bf410036a\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.356119 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-db-sync-config-data\") pod \"9723e99f-fe23-4e42-a037-c31bf410036a\" (UID: \"9723e99f-fe23-4e42-a037-c31bf410036a\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.356230 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/83a84e66-3bc2-4629-b251-40287f224f1b-etc-machine-id\") pod \"83a84e66-3bc2-4629-b251-40287f224f1b\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.356365 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/83a84e66-3bc2-4629-b251-40287f224f1b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "83a84e66-3bc2-4629-b251-40287f224f1b" (UID: "83a84e66-3bc2-4629-b251-40287f224f1b"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.356429 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-combined-ca-bundle\") pod \"83a84e66-3bc2-4629-b251-40287f224f1b\" (UID: \"83a84e66-3bc2-4629-b251-40287f224f1b\") " Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.357769 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/83a84e66-3bc2-4629-b251-40287f224f1b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.361342 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9723e99f-fe23-4e42-a037-c31bf410036a-kube-api-access-vpt7b" (OuterVolumeSpecName: "kube-api-access-vpt7b") pod "9723e99f-fe23-4e42-a037-c31bf410036a" (UID: "9723e99f-fe23-4e42-a037-c31bf410036a"). InnerVolumeSpecName "kube-api-access-vpt7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.361433 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "83a84e66-3bc2-4629-b251-40287f224f1b" (UID: "83a84e66-3bc2-4629-b251-40287f224f1b"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.361968 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9723e99f-fe23-4e42-a037-c31bf410036a" (UID: "9723e99f-fe23-4e42-a037-c31bf410036a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.362181 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-scripts" (OuterVolumeSpecName: "scripts") pod "83a84e66-3bc2-4629-b251-40287f224f1b" (UID: "83a84e66-3bc2-4629-b251-40287f224f1b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.362364 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83a84e66-3bc2-4629-b251-40287f224f1b-kube-api-access-dzmwk" (OuterVolumeSpecName: "kube-api-access-dzmwk") pod "83a84e66-3bc2-4629-b251-40287f224f1b" (UID: "83a84e66-3bc2-4629-b251-40287f224f1b"). InnerVolumeSpecName "kube-api-access-dzmwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.384372 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83a84e66-3bc2-4629-b251-40287f224f1b" (UID: "83a84e66-3bc2-4629-b251-40287f224f1b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.385010 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9723e99f-fe23-4e42-a037-c31bf410036a" (UID: "9723e99f-fe23-4e42-a037-c31bf410036a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.417118 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-config-data" (OuterVolumeSpecName: "config-data") pod "83a84e66-3bc2-4629-b251-40287f224f1b" (UID: "83a84e66-3bc2-4629-b251-40287f224f1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459607 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459638 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459647 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459656 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpt7b\" (UniqueName: \"kubernetes.io/projected/9723e99f-fe23-4e42-a037-c31bf410036a-kube-api-access-vpt7b\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459665 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzmwk\" (UniqueName: \"kubernetes.io/projected/83a84e66-3bc2-4629-b251-40287f224f1b-kube-api-access-dzmwk\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459674 4863 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83a84e66-3bc2-4629-b251-40287f224f1b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459683 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.459690 4863 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9723e99f-fe23-4e42-a037-c31bf410036a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.693443 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.693515 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.731493 4863 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.759862 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.920805 4863 generic.go:334] "Generic (PLEG): container finished" podID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerID="815dfa16dedea85c7a9ab21068ce719e9954092d7086a6ce14313357086705fc" exitCode=0 Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.920862 4863 generic.go:334] "Generic (PLEG): container finished" podID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerID="93338d99ca2dbfaeeb0a02ac816791cf2d02894d08e76bb31829907cbcb73b4a" exitCode=2 Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.920874 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerDied","Data":"815dfa16dedea85c7a9ab21068ce719e9954092d7086a6ce14313357086705fc"} Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.920933 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerDied","Data":"93338d99ca2dbfaeeb0a02ac816791cf2d02894d08e76bb31829907cbcb73b4a"} Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.926834 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vcb5r" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.926836 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vcb5r" event={"ID":"9723e99f-fe23-4e42-a037-c31bf410036a","Type":"ContainerDied","Data":"3f0bbe853efe6900ad41286a52067535321106d77a942318f19d6ad43789afd7"} Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.926977 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f0bbe853efe6900ad41286a52067535321106d77a942318f19d6ad43789afd7" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.928995 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6xkg4" event={"ID":"83a84e66-3bc2-4629-b251-40287f224f1b","Type":"ContainerDied","Data":"b0645b3381e4721a674eb2f6fa63ec4bffeea1a92b8a859a5643ce6dbe7cbf54"} Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.929050 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0645b3381e4721a674eb2f6fa63ec4bffeea1a92b8a859a5643ce6dbe7cbf54" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.929059 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-6xkg4" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.929503 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 07:08:45 crc kubenswrapper[4863]: I1205 07:08:45.929533 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.208556 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:08:46 crc kubenswrapper[4863]: E1205 07:08:46.209346 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9723e99f-fe23-4e42-a037-c31bf410036a" containerName="barbican-db-sync" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.209363 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9723e99f-fe23-4e42-a037-c31bf410036a" containerName="barbican-db-sync" Dec 05 07:08:46 crc kubenswrapper[4863]: E1205 07:08:46.209390 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerName="init" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.209397 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerName="init" Dec 05 07:08:46 crc kubenswrapper[4863]: E1205 07:08:46.209417 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerName="dnsmasq-dns" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.209424 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerName="dnsmasq-dns" Dec 05 07:08:46 crc kubenswrapper[4863]: E1205 07:08:46.209436 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83a84e66-3bc2-4629-b251-40287f224f1b" containerName="cinder-db-sync" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.209443 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="83a84e66-3bc2-4629-b251-40287f224f1b" containerName="cinder-db-sync" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.209721 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="83a84e66-3bc2-4629-b251-40287f224f1b" containerName="cinder-db-sync" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.209738 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" containerName="dnsmasq-dns" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.209748 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9723e99f-fe23-4e42-a037-c31bf410036a" containerName="barbican-db-sync" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.210910 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.223254 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.223461 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.223764 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-brj55" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.223926 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.224038 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-594bb7dbb9-862q2"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.225745 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.228965 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-85gv2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.229277 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.229456 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.250541 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-bd7bc7b54-vzjx4"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.251888 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.254183 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274695 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-scripts\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274737 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-combined-ca-bundle\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274762 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skb5k\" (UniqueName: \"kubernetes.io/projected/97b9e3bc-115e-4613-9e5e-4cf44651585e-kube-api-access-skb5k\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274789 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvxzr\" (UniqueName: \"kubernetes.io/projected/7028cf34-b62b-48ea-b90d-53b175729e15-kube-api-access-tvxzr\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274803 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274820 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data-custom\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274842 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-logs\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274866 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: 
I1205 07:08:46.274882 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274898 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7028cf34-b62b-48ea-b90d-53b175729e15-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274928 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-combined-ca-bundle\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274947 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274975 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b9e3bc-115e-4613-9e5e-4cf44651585e-logs\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.274992 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.275010 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d7n4\" (UniqueName: \"kubernetes.io/projected/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-kube-api-access-9d7n4\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.275038 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data-custom\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.277554 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-bd7bc7b54-vzjx4"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.299239 4863 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.307696 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-594bb7dbb9-862q2"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.349490 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b85f48765-tr5fp"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.351000 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379407 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data-custom\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379480 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-config\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379520 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-scripts\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379539 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-combined-ca-bundle\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379561 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skb5k\" (UniqueName: \"kubernetes.io/projected/97b9e3bc-115e-4613-9e5e-4cf44651585e-kube-api-access-skb5k\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379580 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvxzr\" (UniqueName: \"kubernetes.io/projected/7028cf34-b62b-48ea-b90d-53b175729e15-kube-api-access-tvxzr\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379596 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-nb\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379611 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5r2t\" (UniqueName: 
\"kubernetes.io/projected/1fafb17b-c863-4727-83e7-c048d4cd47ed-kube-api-access-l5r2t\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379643 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data-custom\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379665 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-logs\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379686 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379704 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379719 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7028cf34-b62b-48ea-b90d-53b175729e15-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379744 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-swift-storage-0\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379762 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-sb\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379782 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-combined-ca-bundle\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379906 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379935 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b9e3bc-115e-4613-9e5e-4cf44651585e-logs\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379953 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379971 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d7n4\" (UniqueName: \"kubernetes.io/projected/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-kube-api-access-9d7n4\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.379997 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-svc\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.384422 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data-custom\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.389967 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b85f48765-tr5fp"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.390681 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7028cf34-b62b-48ea-b90d-53b175729e15-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.391076 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b9e3bc-115e-4613-9e5e-4cf44651585e-logs\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 
07:08:46.394009 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-logs\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.397485 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.401791 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.411557 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.411605 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-combined-ca-bundle\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.413274 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-combined-ca-bundle\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.413778 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.415071 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data-custom\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.417977 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.425382 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-scripts\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.450064 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvxzr\" (UniqueName: \"kubernetes.io/projected/7028cf34-b62b-48ea-b90d-53b175729e15-kube-api-access-tvxzr\") pod \"cinder-scheduler-0\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.450877 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skb5k\" (UniqueName: \"kubernetes.io/projected/97b9e3bc-115e-4613-9e5e-4cf44651585e-kube-api-access-skb5k\") pod \"barbican-worker-594bb7dbb9-862q2\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.452971 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d7n4\" (UniqueName: \"kubernetes.io/projected/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-kube-api-access-9d7n4\") pod \"barbican-keystone-listener-bd7bc7b54-vzjx4\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.490556 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-config\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.490867 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-nb\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.490888 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5r2t\" (UniqueName: \"kubernetes.io/projected/1fafb17b-c863-4727-83e7-c048d4cd47ed-kube-api-access-l5r2t\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.491642 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-swift-storage-0\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.491674 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-sb\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.491918 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-svc\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.491928 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-nb\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.492483 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-config\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.492643 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-swift-storage-0\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.493223 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-svc\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.493420 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-sb\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.509536 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5f7dbb8d48-8tc5r"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.510985 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.522337 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.527505 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5r2t\" (UniqueName: \"kubernetes.io/projected/1fafb17b-c863-4727-83e7-c048d4cd47ed-kube-api-access-l5r2t\") pod \"dnsmasq-dns-b85f48765-tr5fp\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.533520 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b85f48765-tr5fp"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.534178 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.558269 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.559592 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.561347 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.568183 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.587670 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f7dbb8d48-8tc5r"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.590822 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.592798 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.592836 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhc9n\" (UniqueName: \"kubernetes.io/projected/b832ec47-54da-4553-890f-0a28c51c170a-kube-api-access-jhc9n\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.592859 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-scripts\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.592930 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b832ec47-54da-4553-890f-0a28c51c170a-logs\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.592948 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2268af2-d8bb-40f9-ab1e-b47123bcf809-logs\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.592975 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.592999 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.593016 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data-custom\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.593041 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-combined-ca-bundle\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.593075 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data-custom\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.593141 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq95x\" (UniqueName: \"kubernetes.io/projected/f2268af2-d8bb-40f9-ab1e-b47123bcf809-kube-api-access-wq95x\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.593171 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b832ec47-54da-4553-890f-0a28c51c170a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.601324 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.626873 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.633925 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b21d1baa-e6ea-41dd-b76d-1d5f175476d9" path="/var/lib/kubelet/pods/b21d1baa-e6ea-41dd-b76d-1d5f175476d9/volumes" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.634701 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77d8c9c7-tz6lg"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.650966 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77d8c9c7-tz6lg"] Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.651077 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697546 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-sb\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697604 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data-custom\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697648 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b76p2\" (UniqueName: \"kubernetes.io/projected/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-kube-api-access-b76p2\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697690 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq95x\" (UniqueName: \"kubernetes.io/projected/f2268af2-d8bb-40f9-ab1e-b47123bcf809-kube-api-access-wq95x\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697724 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b832ec47-54da-4553-890f-0a28c51c170a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697747 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697776 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-svc\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697810 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhc9n\" (UniqueName: \"kubernetes.io/projected/b832ec47-54da-4553-890f-0a28c51c170a-kube-api-access-jhc9n\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697848 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-scripts\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697927 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-swift-storage-0\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.697971 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-nb\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.698010 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b832ec47-54da-4553-890f-0a28c51c170a-logs\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.698034 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2268af2-d8bb-40f9-ab1e-b47123bcf809-logs\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.698058 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.698087 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.698110 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data-custom\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.698138 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-config\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.698175 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-combined-ca-bundle\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.700562 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/b832ec47-54da-4553-890f-0a28c51c170a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.706058 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data-custom\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.706301 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b832ec47-54da-4553-890f-0a28c51c170a-logs\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.706458 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.712987 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-combined-ca-bundle\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.713095 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-scripts\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.713481 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.717382 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2268af2-d8bb-40f9-ab1e-b47123bcf809-logs\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.721025 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhc9n\" (UniqueName: \"kubernetes.io/projected/b832ec47-54da-4553-890f-0a28c51c170a-kube-api-access-jhc9n\") pod \"cinder-api-0\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.723271 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data-custom\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.724547 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq95x\" 
(UniqueName: \"kubernetes.io/projected/f2268af2-d8bb-40f9-ab1e-b47123bcf809-kube-api-access-wq95x\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.726576 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data\") pod \"barbican-api-5f7dbb8d48-8tc5r\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.799427 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b76p2\" (UniqueName: \"kubernetes.io/projected/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-kube-api-access-b76p2\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.799519 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-svc\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.799560 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-swift-storage-0\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.799590 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-nb\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.799634 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-config\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.799662 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-sb\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.801909 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-svc\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.802399 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-nb\") pod 
\"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.803115 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-swift-storage-0\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.803779 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-sb\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.806539 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-config\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.819797 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b76p2\" (UniqueName: \"kubernetes.io/projected/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-kube-api-access-b76p2\") pod \"dnsmasq-dns-77d8c9c7-tz6lg\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.940784 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.949938 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:08:46 crc kubenswrapper[4863]: I1205 07:08:46.984148 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.163157 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b85f48765-tr5fp"] Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.276820 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-bd7bc7b54-vzjx4"] Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.293268 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.307129 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-594bb7dbb9-862q2"] Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.518072 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f7dbb8d48-8tc5r"] Dec 05 07:08:47 crc kubenswrapper[4863]: W1205 07:08:47.529295 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb832ec47_54da_4553_890f_0a28c51c170a.slice/crio-5b2571c97fec0d9368f4f36057ac571ca47dda76ea934cbe4a34bdc853ef1e99 WatchSource:0}: Error finding container 5b2571c97fec0d9368f4f36057ac571ca47dda76ea934cbe4a34bdc853ef1e99: Status 404 returned error can't find the container with id 5b2571c97fec0d9368f4f36057ac571ca47dda76ea934cbe4a34bdc853ef1e99 Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.529644 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77d8c9c7-tz6lg"] Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.540834 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.961046 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" event={"ID":"f2268af2-d8bb-40f9-ab1e-b47123bcf809","Type":"ContainerStarted","Data":"659807f17edc9aa9a975ea8b89acc5230fdf5857a91414de58f4ddadc572b044"} Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.962926 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" event={"ID":"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6","Type":"ContainerStarted","Data":"efbfb9595668a4664aaebb91a98897ac887662d69a0da20dd984d1c9016179bb"} Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.965757 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-594bb7dbb9-862q2" event={"ID":"97b9e3bc-115e-4613-9e5e-4cf44651585e","Type":"ContainerStarted","Data":"d7b3af1e9115ebbee604cee0520867054a7f71340e8e9363dad6615f21985057"} Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.966915 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7028cf34-b62b-48ea-b90d-53b175729e15","Type":"ContainerStarted","Data":"8fd5966c711d3c47fa960291c99e6c4e5c34688a919426bfb3f41ad8a6f2112e"} Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.968247 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b832ec47-54da-4553-890f-0a28c51c170a","Type":"ContainerStarted","Data":"5b2571c97fec0d9368f4f36057ac571ca47dda76ea934cbe4a34bdc853ef1e99"} Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.969746 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" 
event={"ID":"d69b9a69-5339-4c6d-ab1c-1e390d959e6f","Type":"ContainerStarted","Data":"d197009deb0ad263019d16d986a81c918ac8eaeb135021e5afffefc8460fc1f5"} Dec 05 07:08:47 crc kubenswrapper[4863]: I1205 07:08:47.971378 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b85f48765-tr5fp" event={"ID":"1fafb17b-c863-4727-83e7-c048d4cd47ed","Type":"ContainerStarted","Data":"0df8ad22c92fc9409042cb00153cf74823ab94edf317cb4d030e9c6807980080"} Dec 05 07:08:48 crc kubenswrapper[4863]: I1205 07:08:48.226236 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:08:48 crc kubenswrapper[4863]: I1205 07:08:48.404302 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 07:08:48 crc kubenswrapper[4863]: I1205 07:08:48.404416 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 07:08:48 crc kubenswrapper[4863]: I1205 07:08:48.422704 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 07:08:49 crc kubenswrapper[4863]: I1205 07:08:49.270189 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:49 crc kubenswrapper[4863]: I1205 07:08:49.271692 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:49 crc kubenswrapper[4863]: I1205 07:08:49.306375 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:49 crc kubenswrapper[4863]: I1205 07:08:49.335847 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.004369 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" event={"ID":"f2268af2-d8bb-40f9-ab1e-b47123bcf809","Type":"ContainerStarted","Data":"6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f"} Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.004708 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.004725 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.004736 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" event={"ID":"f2268af2-d8bb-40f9-ab1e-b47123bcf809","Type":"ContainerStarted","Data":"4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6"} Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.014525 4863 generic.go:334] "Generic (PLEG): container finished" podID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerID="844cf3427eeefeacddefcfd694a14ce540876816a3b127aa7962e280a38268d9" exitCode=0 Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.014602 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerDied","Data":"844cf3427eeefeacddefcfd694a14ce540876816a3b127aa7962e280a38268d9"} Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.019938 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"b832ec47-54da-4553-890f-0a28c51c170a","Type":"ContainerStarted","Data":"679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3"} Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.019983 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b832ec47-54da-4553-890f-0a28c51c170a","Type":"ContainerStarted","Data":"857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c"} Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.020006 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api-log" containerID="cri-o://857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c" gracePeriod=30 Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.020037 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.020051 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api" containerID="cri-o://679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3" gracePeriod=30 Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.030979 4863 generic.go:334] "Generic (PLEG): container finished" podID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerID="8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785" exitCode=0 Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.031077 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" event={"ID":"d69b9a69-5339-4c6d-ab1c-1e390d959e6f","Type":"ContainerDied","Data":"8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785"} Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.034225 4863 generic.go:334] "Generic (PLEG): container finished" podID="1fafb17b-c863-4727-83e7-c048d4cd47ed" containerID="36c0a13f0ba50c9fbe5a73235c9c1a600228aa3301fe473940d20aa5e429ca77" exitCode=0 Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.034626 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" podStartSLOduration=4.034613742 podStartE2EDuration="4.034613742s" podCreationTimestamp="2025-12-05 07:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:50.024036798 +0000 UTC m=+1357.750033838" watchObservedRunningTime="2025-12-05 07:08:50.034613742 +0000 UTC m=+1357.760610782" Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.035356 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b85f48765-tr5fp" event={"ID":"1fafb17b-c863-4727-83e7-c048d4cd47ed","Type":"ContainerDied","Data":"36c0a13f0ba50c9fbe5a73235c9c1a600228aa3301fe473940d20aa5e429ca77"} Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.035397 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.035653 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:50 crc kubenswrapper[4863]: I1205 07:08:50.074588 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" 
podStartSLOduration=4.074566154 podStartE2EDuration="4.074566154s" podCreationTimestamp="2025-12-05 07:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:50.06567506 +0000 UTC m=+1357.791672100" watchObservedRunningTime="2025-12-05 07:08:50.074566154 +0000 UTC m=+1357.800563204" Dec 05 07:08:51 crc kubenswrapper[4863]: I1205 07:08:51.047391 4863 generic.go:334] "Generic (PLEG): container finished" podID="b832ec47-54da-4553-890f-0a28c51c170a" containerID="857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c" exitCode=143 Dec 05 07:08:51 crc kubenswrapper[4863]: I1205 07:08:51.047559 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b832ec47-54da-4553-890f-0a28c51c170a","Type":"ContainerDied","Data":"857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c"} Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.058039 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.058307 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.100232 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.118514 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.706573 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-776db75b76-jmjll"] Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.708396 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.712376 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.712645 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.714347 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-776db75b76-jmjll"] Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.716399 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.744056 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.793714 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-combined-ca-bundle\") pod \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.793830 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpwv7\" (UniqueName: \"kubernetes.io/projected/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-kube-api-access-lpwv7\") pod \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.793940 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-sb\") pod \"1fafb17b-c863-4727-83e7-c048d4cd47ed\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794042 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-nb\") pod \"1fafb17b-c863-4727-83e7-c048d4cd47ed\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794075 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-config-data\") pod \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794101 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-svc\") pod \"1fafb17b-c863-4727-83e7-c048d4cd47ed\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794141 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-scripts\") pod \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794222 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5r2t\" (UniqueName: \"kubernetes.io/projected/1fafb17b-c863-4727-83e7-c048d4cd47ed-kube-api-access-l5r2t\") pod \"1fafb17b-c863-4727-83e7-c048d4cd47ed\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794242 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-run-httpd\") pod \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794267 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-config\") pod \"1fafb17b-c863-4727-83e7-c048d4cd47ed\" (UID: 
\"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794283 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-swift-storage-0\") pod \"1fafb17b-c863-4727-83e7-c048d4cd47ed\" (UID: \"1fafb17b-c863-4727-83e7-c048d4cd47ed\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794311 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-log-httpd\") pod \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794333 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-sg-core-conf-yaml\") pod \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\" (UID: \"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65\") " Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794527 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-internal-tls-certs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794571 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data-custom\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794596 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c796ad-a73e-4924-a59f-05031fcbb9d0-logs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794615 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-public-tls-certs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794659 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrqd5\" (UniqueName: \"kubernetes.io/projected/92c796ad-a73e-4924-a59f-05031fcbb9d0-kube-api-access-vrqd5\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794705 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-combined-ca-bundle\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " 
pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.794724 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.795167 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" (UID: "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.796130 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" (UID: "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.802484 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-scripts" (OuterVolumeSpecName: "scripts") pod "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" (UID: "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.806751 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-kube-api-access-lpwv7" (OuterVolumeSpecName: "kube-api-access-lpwv7") pod "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" (UID: "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65"). InnerVolumeSpecName "kube-api-access-lpwv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.813458 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fafb17b-c863-4727-83e7-c048d4cd47ed-kube-api-access-l5r2t" (OuterVolumeSpecName: "kube-api-access-l5r2t") pod "1fafb17b-c863-4727-83e7-c048d4cd47ed" (UID: "1fafb17b-c863-4727-83e7-c048d4cd47ed"). InnerVolumeSpecName "kube-api-access-l5r2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.847521 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-config" (OuterVolumeSpecName: "config") pod "1fafb17b-c863-4727-83e7-c048d4cd47ed" (UID: "1fafb17b-c863-4727-83e7-c048d4cd47ed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.850186 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1fafb17b-c863-4727-83e7-c048d4cd47ed" (UID: "1fafb17b-c863-4727-83e7-c048d4cd47ed"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.853609 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1fafb17b-c863-4727-83e7-c048d4cd47ed" (UID: "1fafb17b-c863-4727-83e7-c048d4cd47ed"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.857592 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1fafb17b-c863-4727-83e7-c048d4cd47ed" (UID: "1fafb17b-c863-4727-83e7-c048d4cd47ed"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.862002 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1fafb17b-c863-4727-83e7-c048d4cd47ed" (UID: "1fafb17b-c863-4727-83e7-c048d4cd47ed"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.868204 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" (UID: "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.885264 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" (UID: "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.895772 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data-custom\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.895817 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c796ad-a73e-4924-a59f-05031fcbb9d0-logs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.895839 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-public-tls-certs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.895882 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrqd5\" (UniqueName: \"kubernetes.io/projected/92c796ad-a73e-4924-a59f-05031fcbb9d0-kube-api-access-vrqd5\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.895921 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-combined-ca-bundle\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.895939 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896012 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-internal-tls-certs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896063 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5r2t\" (UniqueName: \"kubernetes.io/projected/1fafb17b-c863-4727-83e7-c048d4cd47ed-kube-api-access-l5r2t\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896073 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896081 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-config\") on 
node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896090 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896097 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896105 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896113 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896121 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpwv7\" (UniqueName: \"kubernetes.io/projected/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-kube-api-access-lpwv7\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896130 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896138 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896145 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fafb17b-c863-4727-83e7-c048d4cd47ed-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.896153 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.897417 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c796ad-a73e-4924-a59f-05031fcbb9d0-logs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.900345 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-public-tls-certs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.900372 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-combined-ca-bundle\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " 
pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.901069 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-internal-tls-certs\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.901364 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data-custom\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.901642 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.915088 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrqd5\" (UniqueName: \"kubernetes.io/projected/92c796ad-a73e-4924-a59f-05031fcbb9d0-kube-api-access-vrqd5\") pod \"barbican-api-776db75b76-jmjll\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:52 crc kubenswrapper[4863]: I1205 07:08:52.919868 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-config-data" (OuterVolumeSpecName: "config-data") pod "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" (UID: "1b1b222d-ab57-4b0f-ade3-2d5f625d4f65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:52.999572 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.065567 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.072346 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" event={"ID":"d69b9a69-5339-4c6d-ab1c-1e390d959e6f","Type":"ContainerStarted","Data":"a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37"} Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.072463 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.074266 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b85f48765-tr5fp" event={"ID":"1fafb17b-c863-4727-83e7-c048d4cd47ed","Type":"ContainerDied","Data":"0df8ad22c92fc9409042cb00153cf74823ab94edf317cb4d030e9c6807980080"} Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.074321 4863 scope.go:117] "RemoveContainer" containerID="36c0a13f0ba50c9fbe5a73235c9c1a600228aa3301fe473940d20aa5e429ca77" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.074463 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b85f48765-tr5fp" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.077271 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" event={"ID":"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6","Type":"ContainerStarted","Data":"8c2f71b77923ff40514479e86a2a93d8a6db31c3b4b1aa7bee31460d5cfebb4a"} Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.083367 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1b1b222d-ab57-4b0f-ade3-2d5f625d4f65","Type":"ContainerDied","Data":"f39642cbc772739dd8d3082577de0059bff277c370b79bf3d8cb90edc04f2599"} Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.083496 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.092187 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-594bb7dbb9-862q2" event={"ID":"97b9e3bc-115e-4613-9e5e-4cf44651585e","Type":"ContainerStarted","Data":"2d5bab542db02ef97af154097578dd404d6d1379735eb1ccb0151edc08a521c2"} Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.095499 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" podStartSLOduration=7.095464902 podStartE2EDuration="7.095464902s" podCreationTimestamp="2025-12-05 07:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:53.090251336 +0000 UTC m=+1360.816248376" watchObservedRunningTime="2025-12-05 07:08:53.095464902 +0000 UTC m=+1360.821461942" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.140730 4863 scope.go:117] "RemoveContainer" containerID="815dfa16dedea85c7a9ab21068ce719e9954092d7086a6ce14313357086705fc" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.155703 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b85f48765-tr5fp"] Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.165706 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b85f48765-tr5fp"] Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.208100 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.208173 4863 scope.go:117] "RemoveContainer" containerID="93338d99ca2dbfaeeb0a02ac816791cf2d02894d08e76bb31829907cbcb73b4a" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.217742 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.223628 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:08:53 crc kubenswrapper[4863]: E1205 07:08:53.224232 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="ceilometer-notification-agent" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224250 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="ceilometer-notification-agent" Dec 05 07:08:53 crc kubenswrapper[4863]: E1205 07:08:53.224284 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="sg-core" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224292 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="sg-core" Dec 05 07:08:53 crc kubenswrapper[4863]: E1205 07:08:53.224310 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fafb17b-c863-4727-83e7-c048d4cd47ed" containerName="init" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224317 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fafb17b-c863-4727-83e7-c048d4cd47ed" containerName="init" Dec 05 07:08:53 crc kubenswrapper[4863]: E1205 07:08:53.224344 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="proxy-httpd" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224353 4863 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="proxy-httpd" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224609 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="sg-core" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224628 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="proxy-httpd" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224648 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" containerName="ceilometer-notification-agent" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.224665 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fafb17b-c863-4727-83e7-c048d4cd47ed" containerName="init" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.229348 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.233411 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.233975 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.234233 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.260838 4863 scope.go:117] "RemoveContainer" containerID="844cf3427eeefeacddefcfd694a14ce540876816a3b127aa7962e280a38268d9" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.304156 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-run-httpd\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.304208 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-log-httpd\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.304241 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-config-data\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.304316 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.304387 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65vlj\" (UniqueName: \"kubernetes.io/projected/fc154801-3bb7-4d1b-8165-10a6c5dcea55-kube-api-access-65vlj\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " 
pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.304443 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.304524 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-scripts\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.406488 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.413840 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65vlj\" (UniqueName: \"kubernetes.io/projected/fc154801-3bb7-4d1b-8165-10a6c5dcea55-kube-api-access-65vlj\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.413294 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.413913 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.414044 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-scripts\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.414104 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-run-httpd\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.414151 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-log-httpd\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.414177 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-config-data\") pod \"ceilometer-0\" (UID: 
\"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.414746 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-run-httpd\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.414966 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-log-httpd\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.418948 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.422246 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-scripts\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.424454 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-config-data\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.432370 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65vlj\" (UniqueName: \"kubernetes.io/projected/fc154801-3bb7-4d1b-8165-10a6c5dcea55-kube-api-access-65vlj\") pod \"ceilometer-0\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.553706 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:08:53 crc kubenswrapper[4863]: I1205 07:08:53.658077 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-776db75b76-jmjll"] Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.064110 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:08:54 crc kubenswrapper[4863]: W1205 07:08:54.069729 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc154801_3bb7_4d1b_8165_10a6c5dcea55.slice/crio-c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27 WatchSource:0}: Error finding container c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27: Status 404 returned error can't find the container with id c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27 Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.114604 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerStarted","Data":"c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27"} Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.120155 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-776db75b76-jmjll" event={"ID":"92c796ad-a73e-4924-a59f-05031fcbb9d0","Type":"ContainerStarted","Data":"07329e6fbf3ef6acbf027dc83e5cf92b53de3edc0cffa4a41162cabe931ecb30"} Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.122379 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-776db75b76-jmjll" event={"ID":"92c796ad-a73e-4924-a59f-05031fcbb9d0","Type":"ContainerStarted","Data":"3f24d24ddecde0c03610f95481ed132c7db37024414b9adb37509ec427cedc45"} Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.124809 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" event={"ID":"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6","Type":"ContainerStarted","Data":"f9cbbd75cf4e441651ca07035330ebfbbf0163c249a21a5f6b8d9986b0edde6a"} Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.130424 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-594bb7dbb9-862q2" event={"ID":"97b9e3bc-115e-4613-9e5e-4cf44651585e","Type":"ContainerStarted","Data":"de7ae6772610824286f4a36b1feda8b4ab485b788e3f580e7258992e82f32077"} Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.148099 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7028cf34-b62b-48ea-b90d-53b175729e15","Type":"ContainerStarted","Data":"bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4"} Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.162282 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" podStartSLOduration=2.898244683 podStartE2EDuration="8.162258205s" podCreationTimestamp="2025-12-05 07:08:46 +0000 UTC" firstStartedPulling="2025-12-05 07:08:47.296193558 +0000 UTC m=+1355.022190598" lastFinishedPulling="2025-12-05 07:08:52.56020708 +0000 UTC m=+1360.286204120" observedRunningTime="2025-12-05 07:08:54.146423671 +0000 UTC m=+1361.872420761" watchObservedRunningTime="2025-12-05 07:08:54.162258205 +0000 UTC m=+1361.888255265" Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.181650 4863 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/barbican-worker-594bb7dbb9-862q2" podStartSLOduration=2.908826619 podStartE2EDuration="8.181619605s" podCreationTimestamp="2025-12-05 07:08:46 +0000 UTC" firstStartedPulling="2025-12-05 07:08:47.295860659 +0000 UTC m=+1355.021857689" lastFinishedPulling="2025-12-05 07:08:52.568653635 +0000 UTC m=+1360.294650675" observedRunningTime="2025-12-05 07:08:54.171679584 +0000 UTC m=+1361.897676624" watchObservedRunningTime="2025-12-05 07:08:54.181619605 +0000 UTC m=+1361.907616655" Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.621321 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b1b222d-ab57-4b0f-ade3-2d5f625d4f65" path="/var/lib/kubelet/pods/1b1b222d-ab57-4b0f-ade3-2d5f625d4f65/volumes" Dec 05 07:08:54 crc kubenswrapper[4863]: I1205 07:08:54.622550 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fafb17b-c863-4727-83e7-c048d4cd47ed" path="/var/lib/kubelet/pods/1fafb17b-c863-4727-83e7-c048d4cd47ed/volumes" Dec 05 07:08:55 crc kubenswrapper[4863]: I1205 07:08:55.157170 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-776db75b76-jmjll" event={"ID":"92c796ad-a73e-4924-a59f-05031fcbb9d0","Type":"ContainerStarted","Data":"9ee5b5095f60b3fcf7b47a34050adc8fc3929d01b34264ecd58fb01bc25bd387"} Dec 05 07:08:55 crc kubenswrapper[4863]: I1205 07:08:55.157577 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:55 crc kubenswrapper[4863]: I1205 07:08:55.157596 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:08:55 crc kubenswrapper[4863]: I1205 07:08:55.161430 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerStarted","Data":"2981c9d5d3508af76d8c7dd845f35c2c30760629a86f54a79e179d5722391038"} Dec 05 07:08:55 crc kubenswrapper[4863]: I1205 07:08:55.163780 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7028cf34-b62b-48ea-b90d-53b175729e15","Type":"ContainerStarted","Data":"5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961"} Dec 05 07:08:55 crc kubenswrapper[4863]: I1205 07:08:55.189373 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-776db75b76-jmjll" podStartSLOduration=3.189348456 podStartE2EDuration="3.189348456s" podCreationTimestamp="2025-12-05 07:08:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:08:55.17673618 +0000 UTC m=+1362.902733260" watchObservedRunningTime="2025-12-05 07:08:55.189348456 +0000 UTC m=+1362.915345516" Dec 05 07:08:55 crc kubenswrapper[4863]: I1205 07:08:55.220652 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.958284832 podStartE2EDuration="9.220619414s" podCreationTimestamp="2025-12-05 07:08:46 +0000 UTC" firstStartedPulling="2025-12-05 07:08:47.296266589 +0000 UTC m=+1355.022263629" lastFinishedPulling="2025-12-05 07:08:52.558601171 +0000 UTC m=+1360.284598211" observedRunningTime="2025-12-05 07:08:55.213722197 +0000 UTC m=+1362.939719237" watchObservedRunningTime="2025-12-05 07:08:55.220619414 +0000 UTC m=+1362.946616464" Dec 05 07:08:56 crc kubenswrapper[4863]: I1205 07:08:56.174275 4863 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerStarted","Data":"370445ceeb8f94782a2e211301f0fce343098c373bfa53e4307c56c95a72a7ec"} Dec 05 07:08:56 crc kubenswrapper[4863]: I1205 07:08:56.569090 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 07:08:57 crc kubenswrapper[4863]: I1205 07:08:57.195041 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerStarted","Data":"b6394afd1c2e593d121cbf48fbf10799e037a7d0b24121f3c849cf5bb20da032"} Dec 05 07:08:57 crc kubenswrapper[4863]: I1205 07:08:57.433901 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:08:58 crc kubenswrapper[4863]: I1205 07:08:58.207552 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerStarted","Data":"9ec15c3bc7fa4352f86a98b71d34e6f0aa6b906b20444261296ea736afd8e249"} Dec 05 07:08:58 crc kubenswrapper[4863]: I1205 07:08:58.208870 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 07:08:58 crc kubenswrapper[4863]: I1205 07:08:58.255850 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9599020120000001 podStartE2EDuration="5.255832399s" podCreationTimestamp="2025-12-05 07:08:53 +0000 UTC" firstStartedPulling="2025-12-05 07:08:54.071969296 +0000 UTC m=+1361.797966326" lastFinishedPulling="2025-12-05 07:08:57.367899673 +0000 UTC m=+1365.093896713" observedRunningTime="2025-12-05 07:08:58.247979988 +0000 UTC m=+1365.973977048" watchObservedRunningTime="2025-12-05 07:08:58.255832399 +0000 UTC m=+1365.981829439" Dec 05 07:08:58 crc kubenswrapper[4863]: I1205 07:08:58.911250 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:58 crc kubenswrapper[4863]: I1205 07:08:58.916268 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:08:59 crc kubenswrapper[4863]: I1205 07:08:59.096004 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:59 crc kubenswrapper[4863]: I1205 07:08:59.149005 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 07:08:59 crc kubenswrapper[4863]: I1205 07:08:59.171380 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:08:59 crc kubenswrapper[4863]: I1205 07:08:59.737187 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:08:59 crc kubenswrapper[4863]: I1205 07:08:59.812254 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c5ffb56f6-p58jf"] Dec 05 07:08:59 crc kubenswrapper[4863]: I1205 07:08:59.812524 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6c5ffb56f6-p58jf" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-api" containerID="cri-o://03d180447008c516094a59dbf16ff7dc80e25060351520b84cf364cc365f5a32" gracePeriod=30 Dec 05 
07:08:59 crc kubenswrapper[4863]: I1205 07:08:59.812634 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6c5ffb56f6-p58jf" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-httpd" containerID="cri-o://295bddc777143f899e2f1dfc97126a15fe79c5d73e7475fbeded1e556507917e" gracePeriod=30 Dec 05 07:09:00 crc kubenswrapper[4863]: I1205 07:09:00.231372 4863 generic.go:334] "Generic (PLEG): container finished" podID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerID="295bddc777143f899e2f1dfc97126a15fe79c5d73e7475fbeded1e556507917e" exitCode=0 Dec 05 07:09:00 crc kubenswrapper[4863]: I1205 07:09:00.231431 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c5ffb56f6-p58jf" event={"ID":"c0314b5f-6374-4df1-9d19-7a87ff04b4ae","Type":"ContainerDied","Data":"295bddc777143f899e2f1dfc97126a15fe79c5d73e7475fbeded1e556507917e"} Dec 05 07:09:01 crc kubenswrapper[4863]: I1205 07:09:01.773748 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 07:09:01 crc kubenswrapper[4863]: I1205 07:09:01.835657 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:09:01 crc kubenswrapper[4863]: I1205 07:09:01.987650 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.060702 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77f55878d5-gbzp7"] Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.061947 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerName="dnsmasq-dns" containerID="cri-o://8ac9c137406b4443ee1a36b0528da792b08d9ad118d97635e400cf98f281489d" gracePeriod=10 Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.137036 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.147:5353: connect: connection refused" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.208605 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.284034 4863 generic.go:334] "Generic (PLEG): container finished" podID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerID="03d180447008c516094a59dbf16ff7dc80e25060351520b84cf364cc365f5a32" exitCode=0 Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.284104 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c5ffb56f6-p58jf" event={"ID":"c0314b5f-6374-4df1-9d19-7a87ff04b4ae","Type":"ContainerDied","Data":"03d180447008c516094a59dbf16ff7dc80e25060351520b84cf364cc365f5a32"} Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.322705 4863 generic.go:334] "Generic (PLEG): container finished" podID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerID="8ac9c137406b4443ee1a36b0528da792b08d9ad118d97635e400cf98f281489d" exitCode=0 Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.322930 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="cinder-scheduler" 
containerID="cri-o://bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4" gracePeriod=30 Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.323221 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" event={"ID":"715aee05-cbc6-41e1-b7c7-247170eaca7b","Type":"ContainerDied","Data":"8ac9c137406b4443ee1a36b0528da792b08d9ad118d97635e400cf98f281489d"} Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.323454 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="probe" containerID="cri-o://5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961" gracePeriod=30 Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.383751 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.533192 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-ovndb-tls-certs\") pod \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.533297 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4rfm\" (UniqueName: \"kubernetes.io/projected/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-kube-api-access-r4rfm\") pod \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.533400 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-combined-ca-bundle\") pod \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.533455 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-config\") pod \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.533623 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-httpd-config\") pod \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\" (UID: \"c0314b5f-6374-4df1-9d19-7a87ff04b4ae\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.549136 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-kube-api-access-r4rfm" (OuterVolumeSpecName: "kube-api-access-r4rfm") pod "c0314b5f-6374-4df1-9d19-7a87ff04b4ae" (UID: "c0314b5f-6374-4df1-9d19-7a87ff04b4ae"). InnerVolumeSpecName "kube-api-access-r4rfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.554703 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c0314b5f-6374-4df1-9d19-7a87ff04b4ae" (UID: "c0314b5f-6374-4df1-9d19-7a87ff04b4ae"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.635630 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.635664 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4rfm\" (UniqueName: \"kubernetes.io/projected/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-kube-api-access-r4rfm\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.635766 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0314b5f-6374-4df1-9d19-7a87ff04b4ae" (UID: "c0314b5f-6374-4df1-9d19-7a87ff04b4ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.680273 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.774013 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-config\") pod \"715aee05-cbc6-41e1-b7c7-247170eaca7b\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.775505 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-svc\") pod \"715aee05-cbc6-41e1-b7c7-247170eaca7b\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.775622 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-swift-storage-0\") pod \"715aee05-cbc6-41e1-b7c7-247170eaca7b\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.775814 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-nb\") pod \"715aee05-cbc6-41e1-b7c7-247170eaca7b\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.775994 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-sb\") pod \"715aee05-cbc6-41e1-b7c7-247170eaca7b\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.776143 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4nzx\" (UniqueName: \"kubernetes.io/projected/715aee05-cbc6-41e1-b7c7-247170eaca7b-kube-api-access-n4nzx\") pod \"715aee05-cbc6-41e1-b7c7-247170eaca7b\" (UID: \"715aee05-cbc6-41e1-b7c7-247170eaca7b\") " Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.776625 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.781621 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-config" (OuterVolumeSpecName: "config") pod "c0314b5f-6374-4df1-9d19-7a87ff04b4ae" (UID: "c0314b5f-6374-4df1-9d19-7a87ff04b4ae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.802067 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/715aee05-cbc6-41e1-b7c7-247170eaca7b-kube-api-access-n4nzx" (OuterVolumeSpecName: "kube-api-access-n4nzx") pod "715aee05-cbc6-41e1-b7c7-247170eaca7b" (UID: "715aee05-cbc6-41e1-b7c7-247170eaca7b"). InnerVolumeSpecName "kube-api-access-n4nzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.856210 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "715aee05-cbc6-41e1-b7c7-247170eaca7b" (UID: "715aee05-cbc6-41e1-b7c7-247170eaca7b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.880078 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.880122 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.880135 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4nzx\" (UniqueName: \"kubernetes.io/projected/715aee05-cbc6-41e1-b7c7-247170eaca7b-kube-api-access-n4nzx\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.889435 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c0314b5f-6374-4df1-9d19-7a87ff04b4ae" (UID: "c0314b5f-6374-4df1-9d19-7a87ff04b4ae"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.898979 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "715aee05-cbc6-41e1-b7c7-247170eaca7b" (UID: "715aee05-cbc6-41e1-b7c7-247170eaca7b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.930057 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "715aee05-cbc6-41e1-b7c7-247170eaca7b" (UID: "715aee05-cbc6-41e1-b7c7-247170eaca7b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.970163 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "715aee05-cbc6-41e1-b7c7-247170eaca7b" (UID: "715aee05-cbc6-41e1-b7c7-247170eaca7b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.977016 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-config" (OuterVolumeSpecName: "config") pod "715aee05-cbc6-41e1-b7c7-247170eaca7b" (UID: "715aee05-cbc6-41e1-b7c7-247170eaca7b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.982013 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.982060 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.982075 4863 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0314b5f-6374-4df1-9d19-7a87ff04b4ae-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.982088 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:02 crc kubenswrapper[4863]: I1205 07:09:02.982100 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/715aee05-cbc6-41e1-b7c7-247170eaca7b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.334330 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c5ffb56f6-p58jf" event={"ID":"c0314b5f-6374-4df1-9d19-7a87ff04b4ae","Type":"ContainerDied","Data":"eb8079418802b7f18ac5474a676608ac414f79e562475c64d15fc7d05abb3609"} Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.334382 4863 scope.go:117] "RemoveContainer" containerID="295bddc777143f899e2f1dfc97126a15fe79c5d73e7475fbeded1e556507917e" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.334518 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c5ffb56f6-p58jf" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.341696 4863 generic.go:334] "Generic (PLEG): container finished" podID="7028cf34-b62b-48ea-b90d-53b175729e15" containerID="5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961" exitCode=0 Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.341848 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7028cf34-b62b-48ea-b90d-53b175729e15","Type":"ContainerDied","Data":"5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961"} Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.350155 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" event={"ID":"715aee05-cbc6-41e1-b7c7-247170eaca7b","Type":"ContainerDied","Data":"0d6e9fe2cf482ba9b6d846f2a9a7bb3446d411b5d9dde8efb29ccdc361366e65"} Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.350288 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f55878d5-gbzp7" Dec 05 07:09:03 crc kubenswrapper[4863]: E1205 07:09:03.353665 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7028cf34_b62b_48ea_b90d_53b175729e15.slice/crio-5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7028cf34_b62b_48ea_b90d_53b175729e15.slice/crio-conmon-5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961.scope\": RecentStats: unable to find data in memory cache]" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.373634 4863 scope.go:117] "RemoveContainer" containerID="03d180447008c516094a59dbf16ff7dc80e25060351520b84cf364cc365f5a32" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.386882 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c5ffb56f6-p58jf"] Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.396421 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6c5ffb56f6-p58jf"] Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.406306 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77f55878d5-gbzp7"] Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.415158 4863 scope.go:117] "RemoveContainer" containerID="8ac9c137406b4443ee1a36b0528da792b08d9ad118d97635e400cf98f281489d" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.447579 4863 scope.go:117] "RemoveContainer" containerID="7e06fab029b73a65dfce5fc537fb8b009dca4931a4b053758311296136352d7d" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.453015 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77f55878d5-gbzp7"] Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.552518 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 07:09:03 crc kubenswrapper[4863]: E1205 07:09:03.552929 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-api" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.552944 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-api" Dec 05 07:09:03 crc kubenswrapper[4863]: 
E1205 07:09:03.552966 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerName="dnsmasq-dns" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.552972 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerName="dnsmasq-dns" Dec 05 07:09:03 crc kubenswrapper[4863]: E1205 07:09:03.552990 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerName="init" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.552995 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerName="init" Dec 05 07:09:03 crc kubenswrapper[4863]: E1205 07:09:03.553005 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-httpd" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.553012 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-httpd" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.553171 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-httpd" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.553186 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" containerName="dnsmasq-dns" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.553198 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" containerName="neutron-api" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.553772 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.559195 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.559516 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-95snj" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.559634 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.566044 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.591138 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.591206 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config-secret\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.591225 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk64l\" (UniqueName: \"kubernetes.io/projected/e2a23ba4-22d9-4750-8d39-53dff19bc328-kube-api-access-kk64l\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.591295 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.693182 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.693280 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config-secret\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.693309 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk64l\" (UniqueName: \"kubernetes.io/projected/e2a23ba4-22d9-4750-8d39-53dff19bc328-kube-api-access-kk64l\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.693420 4863 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.694875 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.699132 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config-secret\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.699712 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.713158 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk64l\" (UniqueName: \"kubernetes.io/projected/e2a23ba4-22d9-4750-8d39-53dff19bc328-kube-api-access-kk64l\") pod \"openstackclient\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " pod="openstack/openstackclient" Dec 05 07:09:03 crc kubenswrapper[4863]: I1205 07:09:03.892882 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 07:09:04 crc kubenswrapper[4863]: W1205 07:09:04.494780 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2a23ba4_22d9_4750_8d39_53dff19bc328.slice/crio-df8e5dd84f2b1347a5a313f67e760c6a12d8506a7a8fcc1e133327e80af14f67 WatchSource:0}: Error finding container df8e5dd84f2b1347a5a313f67e760c6a12d8506a7a8fcc1e133327e80af14f67: Status 404 returned error can't find the container with id df8e5dd84f2b1347a5a313f67e760c6a12d8506a7a8fcc1e133327e80af14f67 Dec 05 07:09:04 crc kubenswrapper[4863]: I1205 07:09:04.505234 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 07:09:04 crc kubenswrapper[4863]: I1205 07:09:04.616811 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="715aee05-cbc6-41e1-b7c7-247170eaca7b" path="/var/lib/kubelet/pods/715aee05-cbc6-41e1-b7c7-247170eaca7b/volumes" Dec 05 07:09:04 crc kubenswrapper[4863]: I1205 07:09:04.617615 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0314b5f-6374-4df1-9d19-7a87ff04b4ae" path="/var/lib/kubelet/pods/c0314b5f-6374-4df1-9d19-7a87ff04b4ae/volumes" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.422101 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.432927 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-combined-ca-bundle\") pod \"7028cf34-b62b-48ea-b90d-53b175729e15\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.433035 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-scripts\") pod \"7028cf34-b62b-48ea-b90d-53b175729e15\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.433105 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data\") pod \"7028cf34-b62b-48ea-b90d-53b175729e15\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.433163 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvxzr\" (UniqueName: \"kubernetes.io/projected/7028cf34-b62b-48ea-b90d-53b175729e15-kube-api-access-tvxzr\") pod \"7028cf34-b62b-48ea-b90d-53b175729e15\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.433214 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7028cf34-b62b-48ea-b90d-53b175729e15-etc-machine-id\") pod \"7028cf34-b62b-48ea-b90d-53b175729e15\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.433319 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data-custom\") pod \"7028cf34-b62b-48ea-b90d-53b175729e15\" (UID: \"7028cf34-b62b-48ea-b90d-53b175729e15\") " Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.433401 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7028cf34-b62b-48ea-b90d-53b175729e15-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7028cf34-b62b-48ea-b90d-53b175729e15" (UID: "7028cf34-b62b-48ea-b90d-53b175729e15"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.433845 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7028cf34-b62b-48ea-b90d-53b175729e15-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.439210 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-scripts" (OuterVolumeSpecName: "scripts") pod "7028cf34-b62b-48ea-b90d-53b175729e15" (UID: "7028cf34-b62b-48ea-b90d-53b175729e15"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.447019 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7028cf34-b62b-48ea-b90d-53b175729e15" (UID: "7028cf34-b62b-48ea-b90d-53b175729e15"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.449154 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7028cf34-b62b-48ea-b90d-53b175729e15-kube-api-access-tvxzr" (OuterVolumeSpecName: "kube-api-access-tvxzr") pod "7028cf34-b62b-48ea-b90d-53b175729e15" (UID: "7028cf34-b62b-48ea-b90d-53b175729e15"). InnerVolumeSpecName "kube-api-access-tvxzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.471001 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.497953 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e2a23ba4-22d9-4750-8d39-53dff19bc328","Type":"ContainerStarted","Data":"df8e5dd84f2b1347a5a313f67e760c6a12d8506a7a8fcc1e133327e80af14f67"} Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.531685 4863 generic.go:334] "Generic (PLEG): container finished" podID="7028cf34-b62b-48ea-b90d-53b175729e15" containerID="bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4" exitCode=0 Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.531721 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7028cf34-b62b-48ea-b90d-53b175729e15","Type":"ContainerDied","Data":"bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4"} Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.531746 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7028cf34-b62b-48ea-b90d-53b175729e15","Type":"ContainerDied","Data":"8fd5966c711d3c47fa960291c99e6c4e5c34688a919426bfb3f41ad8a6f2112e"} Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.531765 4863 scope.go:117] "RemoveContainer" containerID="5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.532048 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.535888 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.535931 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvxzr\" (UniqueName: \"kubernetes.io/projected/7028cf34-b62b-48ea-b90d-53b175729e15-kube-api-access-tvxzr\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.535947 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.546364 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.569037 4863 scope.go:117] "RemoveContainer" containerID="bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.571017 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7028cf34-b62b-48ea-b90d-53b175729e15" (UID: "7028cf34-b62b-48ea-b90d-53b175729e15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.617649 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5f7dbb8d48-8tc5r"] Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.622358 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api-log" containerID="cri-o://4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6" gracePeriod=30 Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.622918 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api" containerID="cri-o://6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f" gracePeriod=30 Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.624777 4863 scope.go:117] "RemoveContainer" containerID="5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961" Dec 05 07:09:05 crc kubenswrapper[4863]: E1205 07:09:05.630920 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961\": container with ID starting with 5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961 not found: ID does not exist" containerID="5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.630973 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961"} err="failed to get container status \"5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961\": rpc 
error: code = NotFound desc = could not find container \"5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961\": container with ID starting with 5aac8436a4c0365883a52fd2f61b30e5e0b4c84417acc891ba0688d4f21c1961 not found: ID does not exist" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.631003 4863 scope.go:117] "RemoveContainer" containerID="bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4" Dec 05 07:09:05 crc kubenswrapper[4863]: E1205 07:09:05.635055 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4\": container with ID starting with bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4 not found: ID does not exist" containerID="bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.635105 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4"} err="failed to get container status \"bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4\": rpc error: code = NotFound desc = could not find container \"bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4\": container with ID starting with bd5ea2f8233f2cd8c7408f26e5c50fafaa682e74c6a22d350e13e346e29eb7d4 not found: ID does not exist" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.639539 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.700640 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data" (OuterVolumeSpecName: "config-data") pod "7028cf34-b62b-48ea-b90d-53b175729e15" (UID: "7028cf34-b62b-48ea-b90d-53b175729e15"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.743820 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7028cf34-b62b-48ea-b90d-53b175729e15-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.940542 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.953553 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.971276 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:09:05 crc kubenswrapper[4863]: E1205 07:09:05.971788 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="cinder-scheduler" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.971807 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="cinder-scheduler" Dec 05 07:09:05 crc kubenswrapper[4863]: E1205 07:09:05.971834 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="probe" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.971867 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="probe" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.972148 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="cinder-scheduler" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.972184 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" containerName="probe" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.992369 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.992505 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:09:05 crc kubenswrapper[4863]: I1205 07:09:05.995834 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.047537 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5mrc\" (UniqueName: \"kubernetes.io/projected/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-kube-api-access-t5mrc\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.047681 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.047767 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.047816 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-scripts\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.047839 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.047890 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.149806 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.149902 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5mrc\" (UniqueName: \"kubernetes.io/projected/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-kube-api-access-t5mrc\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.149930 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.149985 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.150039 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-scripts\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.150063 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.150066 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.155993 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-scripts\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.157991 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.163912 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.169637 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5mrc\" (UniqueName: \"kubernetes.io/projected/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-kube-api-access-t5mrc\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.173065 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.326127 4863 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.607743 4863 generic.go:334] "Generic (PLEG): container finished" podID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerID="4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6" exitCode=143 Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.611376 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7028cf34-b62b-48ea-b90d-53b175729e15" path="/var/lib/kubelet/pods/7028cf34-b62b-48ea-b90d-53b175729e15/volumes" Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.611969 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" event={"ID":"f2268af2-d8bb-40f9-ab1e-b47123bcf809","Type":"ContainerDied","Data":"4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6"} Dec 05 07:09:06 crc kubenswrapper[4863]: I1205 07:09:06.859532 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:09:06 crc kubenswrapper[4863]: W1205 07:09:06.869382 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddcee685a_e1e9_4dd8_b04d_c5719c9bf771.slice/crio-cb2f597865964174fd799611b005b9e59ae5b272be4de78fc68244bdc47c2ccd WatchSource:0}: Error finding container cb2f597865964174fd799611b005b9e59ae5b272be4de78fc68244bdc47c2ccd: Status 404 returned error can't find the container with id cb2f597865964174fd799611b005b9e59ae5b272be4de78fc68244bdc47c2ccd Dec 05 07:09:07 crc kubenswrapper[4863]: I1205 07:09:07.625259 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dcee685a-e1e9-4dd8-b04d-c5719c9bf771","Type":"ContainerStarted","Data":"cb2f597865964174fd799611b005b9e59ae5b272be4de78fc68244bdc47c2ccd"} Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.463841 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.464373 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.464424 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.465238 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"53b1c2ce85bd96c615b503118a891dbcf19dd50f3a5e192bde28113e6752a251"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.465294 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" 
containerName="machine-config-daemon" containerID="cri-o://53b1c2ce85bd96c615b503118a891dbcf19dd50f3a5e192bde28113e6752a251" gracePeriod=600 Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.641063 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="53b1c2ce85bd96c615b503118a891dbcf19dd50f3a5e192bde28113e6752a251" exitCode=0 Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.641127 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"53b1c2ce85bd96c615b503118a891dbcf19dd50f3a5e192bde28113e6752a251"} Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.641159 4863 scope.go:117] "RemoveContainer" containerID="ab7f58a6592494973161b698d02f116307d958edfd4568a004df574f2c07d6ff" Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.644489 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dcee685a-e1e9-4dd8-b04d-c5719c9bf771","Type":"ContainerStarted","Data":"cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b"} Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.644764 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dcee685a-e1e9-4dd8-b04d-c5719c9bf771","Type":"ContainerStarted","Data":"95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e"} Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.833729 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:42678->10.217.0.157:9311: read: connection reset by peer" Dec 05 07:09:08 crc kubenswrapper[4863]: I1205 07:09:08.833744 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:42676->10.217.0.157:9311: read: connection reset by peer" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.228847 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.249986 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.249965904 podStartE2EDuration="4.249965904s" podCreationTimestamp="2025-12-05 07:09:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:08.66673806 +0000 UTC m=+1376.392735120" watchObservedRunningTime="2025-12-05 07:09:09.249965904 +0000 UTC m=+1376.975962954" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.416684 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-combined-ca-bundle\") pod \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.416865 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2268af2-d8bb-40f9-ab1e-b47123bcf809-logs\") pod \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.416916 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq95x\" (UniqueName: \"kubernetes.io/projected/f2268af2-d8bb-40f9-ab1e-b47123bcf809-kube-api-access-wq95x\") pod \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.416944 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data\") pod \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.416996 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data-custom\") pod \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\" (UID: \"f2268af2-d8bb-40f9-ab1e-b47123bcf809\") " Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.417557 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2268af2-d8bb-40f9-ab1e-b47123bcf809-logs" (OuterVolumeSpecName: "logs") pod "f2268af2-d8bb-40f9-ab1e-b47123bcf809" (UID: "f2268af2-d8bb-40f9-ab1e-b47123bcf809"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.421991 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2268af2-d8bb-40f9-ab1e-b47123bcf809-kube-api-access-wq95x" (OuterVolumeSpecName: "kube-api-access-wq95x") pod "f2268af2-d8bb-40f9-ab1e-b47123bcf809" (UID: "f2268af2-d8bb-40f9-ab1e-b47123bcf809"). InnerVolumeSpecName "kube-api-access-wq95x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.437582 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f2268af2-d8bb-40f9-ab1e-b47123bcf809" (UID: "f2268af2-d8bb-40f9-ab1e-b47123bcf809"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.491213 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data" (OuterVolumeSpecName: "config-data") pod "f2268af2-d8bb-40f9-ab1e-b47123bcf809" (UID: "f2268af2-d8bb-40f9-ab1e-b47123bcf809"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.515005 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2268af2-d8bb-40f9-ab1e-b47123bcf809" (UID: "f2268af2-d8bb-40f9-ab1e-b47123bcf809"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.519302 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2268af2-d8bb-40f9-ab1e-b47123bcf809-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.519345 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq95x\" (UniqueName: \"kubernetes.io/projected/f2268af2-d8bb-40f9-ab1e-b47123bcf809-kube-api-access-wq95x\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.519358 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.519370 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.519380 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2268af2-d8bb-40f9-ab1e-b47123bcf809-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.655724 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9"} Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.659319 4863 generic.go:334] "Generic (PLEG): container finished" podID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerID="6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f" exitCode=0 Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.659383 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.659414 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" event={"ID":"f2268af2-d8bb-40f9-ab1e-b47123bcf809","Type":"ContainerDied","Data":"6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f"} Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.659463 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f7dbb8d48-8tc5r" event={"ID":"f2268af2-d8bb-40f9-ab1e-b47123bcf809","Type":"ContainerDied","Data":"659807f17edc9aa9a975ea8b89acc5230fdf5857a91414de58f4ddadc572b044"} Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.659499 4863 scope.go:117] "RemoveContainer" containerID="6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.692581 4863 scope.go:117] "RemoveContainer" containerID="4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.694376 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5f7dbb8d48-8tc5r"] Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.701133 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5f7dbb8d48-8tc5r"] Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.722671 4863 scope.go:117] "RemoveContainer" containerID="6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f" Dec 05 07:09:09 crc kubenswrapper[4863]: E1205 07:09:09.724518 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f\": container with ID starting with 6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f not found: ID does not exist" containerID="6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.724555 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f"} err="failed to get container status \"6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f\": rpc error: code = NotFound desc = could not find container \"6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f\": container with ID starting with 6657f1e0ec831f546a309ebb7207de2bc2c49da2e53e40c24a0bdcd56e43608f not found: ID does not exist" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.724574 4863 scope.go:117] "RemoveContainer" containerID="4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6" Dec 05 07:09:09 crc kubenswrapper[4863]: E1205 07:09:09.725409 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6\": container with ID starting with 4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6 not found: ID does not exist" containerID="4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6" Dec 05 07:09:09 crc kubenswrapper[4863]: I1205 07:09:09.725436 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6"} err="failed to get container status 
\"4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6\": rpc error: code = NotFound desc = could not find container \"4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6\": container with ID starting with 4fbb6e0cc7641016f653ed6d3fc6e863a3eef371d751d1677c3923e5d09b4db6 not found: ID does not exist" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.517540 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-ff9694557-gb857"] Dec 05 07:09:10 crc kubenswrapper[4863]: E1205 07:09:10.518300 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.518324 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api" Dec 05 07:09:10 crc kubenswrapper[4863]: E1205 07:09:10.518376 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api-log" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.518386 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api-log" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.518687 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api-log" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.518727 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" containerName="barbican-api" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.520192 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.530445 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-ff9694557-gb857"] Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.574069 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.574346 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.574491 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.618463 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2268af2-d8bb-40f9-ab1e-b47123bcf809" path="/var/lib/kubelet/pods/f2268af2-d8bb-40f9-ab1e-b47123bcf809/volumes" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678608 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-config-data\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678719 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-etc-swift\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678799 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-internal-tls-certs\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678842 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbhbb\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-kube-api-access-xbhbb\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678871 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-public-tls-certs\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678917 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-run-httpd\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678958 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-log-httpd\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.678982 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-combined-ca-bundle\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.780895 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-log-httpd\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.780956 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-combined-ca-bundle\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.780982 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-config-data\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.781036 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-etc-swift\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.781109 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-internal-tls-certs\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.781140 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbhbb\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-kube-api-access-xbhbb\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.781163 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-public-tls-certs\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.781185 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-run-httpd\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.781375 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-log-httpd\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.781869 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-run-httpd\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.786006 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-internal-tls-certs\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.789977 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-public-tls-certs\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.790387 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-etc-swift\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.790861 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-config-data\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.799857 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbhbb\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-kube-api-access-xbhbb\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.820512 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-combined-ca-bundle\") pod \"swift-proxy-ff9694557-gb857\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:10 crc kubenswrapper[4863]: I1205 07:09:10.885853 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:11 crc kubenswrapper[4863]: I1205 07:09:11.327797 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 07:09:11 crc kubenswrapper[4863]: I1205 07:09:11.408649 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-ff9694557-gb857"] Dec 05 07:09:11 crc kubenswrapper[4863]: I1205 07:09:11.691823 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-ff9694557-gb857" event={"ID":"3201b201-3f68-4c2e-858c-56f0c8242c68","Type":"ContainerStarted","Data":"e7f3886aba1af8ae1cb3a212d1e99200a5cd25ba247cc6a87638e031a97028a4"} Dec 05 07:09:12 crc kubenswrapper[4863]: I1205 07:09:12.705224 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-ff9694557-gb857" event={"ID":"3201b201-3f68-4c2e-858c-56f0c8242c68","Type":"ContainerStarted","Data":"f96538aa3751fc9603b9dde8527729c4d78aba237e3063756b0682a46b12cadd"} Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.009415 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.009704 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-central-agent" containerID="cri-o://2981c9d5d3508af76d8c7dd845f35c2c30760629a86f54a79e179d5722391038" gracePeriod=30 Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.009754 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="proxy-httpd" containerID="cri-o://9ec15c3bc7fa4352f86a98b71d34e6f0aa6b906b20444261296ea736afd8e249" gracePeriod=30 Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.009773 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-notification-agent" containerID="cri-o://370445ceeb8f94782a2e211301f0fce343098c373bfa53e4307c56c95a72a7ec" gracePeriod=30 Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.009754 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="sg-core" containerID="cri-o://b6394afd1c2e593d121cbf48fbf10799e037a7d0b24121f3c849cf5bb20da032" gracePeriod=30 Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.016124 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.720687 4863 generic.go:334] "Generic (PLEG): container finished" podID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerID="b6394afd1c2e593d121cbf48fbf10799e037a7d0b24121f3c849cf5bb20da032" exitCode=2 Dec 05 07:09:13 crc kubenswrapper[4863]: I1205 07:09:13.720757 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerDied","Data":"b6394afd1c2e593d121cbf48fbf10799e037a7d0b24121f3c849cf5bb20da032"} Dec 05 07:09:14 crc kubenswrapper[4863]: I1205 07:09:14.757836 4863 generic.go:334] "Generic (PLEG): container finished" podID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerID="9ec15c3bc7fa4352f86a98b71d34e6f0aa6b906b20444261296ea736afd8e249" exitCode=0 
Dec 05 07:09:14 crc kubenswrapper[4863]: I1205 07:09:14.758199 4863 generic.go:334] "Generic (PLEG): container finished" podID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerID="370445ceeb8f94782a2e211301f0fce343098c373bfa53e4307c56c95a72a7ec" exitCode=0 Dec 05 07:09:14 crc kubenswrapper[4863]: I1205 07:09:14.758212 4863 generic.go:334] "Generic (PLEG): container finished" podID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerID="2981c9d5d3508af76d8c7dd845f35c2c30760629a86f54a79e179d5722391038" exitCode=0 Dec 05 07:09:14 crc kubenswrapper[4863]: I1205 07:09:14.758238 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerDied","Data":"9ec15c3bc7fa4352f86a98b71d34e6f0aa6b906b20444261296ea736afd8e249"} Dec 05 07:09:14 crc kubenswrapper[4863]: I1205 07:09:14.758267 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerDied","Data":"370445ceeb8f94782a2e211301f0fce343098c373bfa53e4307c56c95a72a7ec"} Dec 05 07:09:14 crc kubenswrapper[4863]: I1205 07:09:14.758283 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerDied","Data":"2981c9d5d3508af76d8c7dd845f35c2c30760629a86f54a79e179d5722391038"} Dec 05 07:09:16 crc kubenswrapper[4863]: I1205 07:09:16.637651 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 07:09:16 crc kubenswrapper[4863]: I1205 07:09:16.996386 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:09:16 crc kubenswrapper[4863]: I1205 07:09:16.996645 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="1b04fc39-eb66-4cee-a14f-8162314e456f" containerName="kube-state-metrics" containerID="cri-o://4638cb5d3dfc21e835291a1866346f8cfc7bd2242e8dbe86d9e6e40d7488200e" gracePeriod=30 Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.715812 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.795731 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e2a23ba4-22d9-4750-8d39-53dff19bc328","Type":"ContainerStarted","Data":"c2aaeca34f1d08b6ba77903d3594d5cd3cc71ad58a3b1e953f6d010118039c7d"} Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.811072 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-ff9694557-gb857" event={"ID":"3201b201-3f68-4c2e-858c-56f0c8242c68","Type":"ContainerStarted","Data":"ac2c5273ab207bce433edc60614777727533cb3043b62817be91b5f725b80074"} Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.812130 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.812459 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.814755 4863 generic.go:334] "Generic (PLEG): container finished" podID="1b04fc39-eb66-4cee-a14f-8162314e456f" containerID="4638cb5d3dfc21e835291a1866346f8cfc7bd2242e8dbe86d9e6e40d7488200e" exitCode=2 Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.814808 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1b04fc39-eb66-4cee-a14f-8162314e456f","Type":"ContainerDied","Data":"4638cb5d3dfc21e835291a1866346f8cfc7bd2242e8dbe86d9e6e40d7488200e"} Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.826311 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.837233691 podStartE2EDuration="14.826286981s" podCreationTimestamp="2025-12-05 07:09:03 +0000 UTC" firstStartedPulling="2025-12-05 07:09:04.503184949 +0000 UTC m=+1372.229181989" lastFinishedPulling="2025-12-05 07:09:17.492238239 +0000 UTC m=+1385.218235279" observedRunningTime="2025-12-05 07:09:17.823724378 +0000 UTC m=+1385.549721418" watchObservedRunningTime="2025-12-05 07:09:17.826286981 +0000 UTC m=+1385.552284021" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.834122 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-ff9694557-gb857" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.845745 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc154801-3bb7-4d1b-8165-10a6c5dcea55","Type":"ContainerDied","Data":"c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27"} Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.845799 4863 scope.go:117] "RemoveContainer" containerID="9ec15c3bc7fa4352f86a98b71d34e6f0aa6b906b20444261296ea736afd8e249" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.845829 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.850559 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-config-data\") pod \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.850640 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-sg-core-conf-yaml\") pod \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.851941 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-log-httpd\") pod \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.852077 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-scripts\") pod \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.852152 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-combined-ca-bundle\") pod \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.852210 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65vlj\" (UniqueName: \"kubernetes.io/projected/fc154801-3bb7-4d1b-8165-10a6c5dcea55-kube-api-access-65vlj\") pod \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.852229 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-run-httpd\") pod \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\" (UID: \"fc154801-3bb7-4d1b-8165-10a6c5dcea55\") " Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.859919 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fc154801-3bb7-4d1b-8165-10a6c5dcea55" (UID: "fc154801-3bb7-4d1b-8165-10a6c5dcea55"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.861103 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fc154801-3bb7-4d1b-8165-10a6c5dcea55" (UID: "fc154801-3bb7-4d1b-8165-10a6c5dcea55"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.866239 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-scripts" (OuterVolumeSpecName: "scripts") pod "fc154801-3bb7-4d1b-8165-10a6c5dcea55" (UID: "fc154801-3bb7-4d1b-8165-10a6c5dcea55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.866904 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc154801-3bb7-4d1b-8165-10a6c5dcea55-kube-api-access-65vlj" (OuterVolumeSpecName: "kube-api-access-65vlj") pod "fc154801-3bb7-4d1b-8165-10a6c5dcea55" (UID: "fc154801-3bb7-4d1b-8165-10a6c5dcea55"). InnerVolumeSpecName "kube-api-access-65vlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.868026 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-ff9694557-gb857" podStartSLOduration=7.868007162 podStartE2EDuration="7.868007162s" podCreationTimestamp="2025-12-05 07:09:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:17.861005722 +0000 UTC m=+1385.587002762" watchObservedRunningTime="2025-12-05 07:09:17.868007162 +0000 UTC m=+1385.594004202" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.882909 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.890250 4863 scope.go:117] "RemoveContainer" containerID="b6394afd1c2e593d121cbf48fbf10799e037a7d0b24121f3c849cf5bb20da032" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.918800 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fc154801-3bb7-4d1b-8165-10a6c5dcea55" (UID: "fc154801-3bb7-4d1b-8165-10a6c5dcea55"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.935735 4863 scope.go:117] "RemoveContainer" containerID="370445ceeb8f94782a2e211301f0fce343098c373bfa53e4307c56c95a72a7ec" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.954053 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65vlj\" (UniqueName: \"kubernetes.io/projected/fc154801-3bb7-4d1b-8165-10a6c5dcea55-kube-api-access-65vlj\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.954084 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.954094 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.954103 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc154801-3bb7-4d1b-8165-10a6c5dcea55-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.954111 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.964599 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc154801-3bb7-4d1b-8165-10a6c5dcea55" (UID: "fc154801-3bb7-4d1b-8165-10a6c5dcea55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.969335 4863 scope.go:117] "RemoveContainer" containerID="2981c9d5d3508af76d8c7dd845f35c2c30760629a86f54a79e179d5722391038" Dec 05 07:09:17 crc kubenswrapper[4863]: I1205 07:09:17.991019 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-config-data" (OuterVolumeSpecName: "config-data") pod "fc154801-3bb7-4d1b-8165-10a6c5dcea55" (UID: "fc154801-3bb7-4d1b-8165-10a6c5dcea55"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.054952 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ss9r4\" (UniqueName: \"kubernetes.io/projected/1b04fc39-eb66-4cee-a14f-8162314e456f-kube-api-access-ss9r4\") pod \"1b04fc39-eb66-4cee-a14f-8162314e456f\" (UID: \"1b04fc39-eb66-4cee-a14f-8162314e456f\") " Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.055419 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.055446 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc154801-3bb7-4d1b-8165-10a6c5dcea55-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.058689 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b04fc39-eb66-4cee-a14f-8162314e456f-kube-api-access-ss9r4" (OuterVolumeSpecName: "kube-api-access-ss9r4") pod "1b04fc39-eb66-4cee-a14f-8162314e456f" (UID: "1b04fc39-eb66-4cee-a14f-8162314e456f"). InnerVolumeSpecName "kube-api-access-ss9r4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.156572 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ss9r4\" (UniqueName: \"kubernetes.io/projected/1b04fc39-eb66-4cee-a14f-8162314e456f-kube-api-access-ss9r4\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.175834 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.183854 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.203724 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:18 crc kubenswrapper[4863]: E1205 07:09:18.204062 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-notification-agent" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204078 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-notification-agent" Dec 05 07:09:18 crc kubenswrapper[4863]: E1205 07:09:18.204087 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="proxy-httpd" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204093 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="proxy-httpd" Dec 05 07:09:18 crc kubenswrapper[4863]: E1205 07:09:18.204105 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b04fc39-eb66-4cee-a14f-8162314e456f" containerName="kube-state-metrics" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204110 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b04fc39-eb66-4cee-a14f-8162314e456f" containerName="kube-state-metrics" Dec 05 07:09:18 crc kubenswrapper[4863]: E1205 07:09:18.204129 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="sg-core" 
Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204134 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="sg-core" Dec 05 07:09:18 crc kubenswrapper[4863]: E1205 07:09:18.204153 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-central-agent" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204159 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-central-agent" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204319 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-notification-agent" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204339 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="proxy-httpd" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204347 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="ceilometer-central-agent" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204356 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" containerName="sg-core" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.204370 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b04fc39-eb66-4cee-a14f-8162314e456f" containerName="kube-state-metrics" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.205855 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.207258 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.207579 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.216887 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.361941 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.362285 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shrmm\" (UniqueName: \"kubernetes.io/projected/881da2e1-3983-4b33-b724-d252bbde9f39-kube-api-access-shrmm\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.362349 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-run-httpd\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.362384 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-config-data\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.362413 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-log-httpd\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.362492 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.362598 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-scripts\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.463919 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-log-httpd\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.463966 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.464031 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-scripts\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.464073 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.464108 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shrmm\" (UniqueName: \"kubernetes.io/projected/881da2e1-3983-4b33-b724-d252bbde9f39-kube-api-access-shrmm\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.464151 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-run-httpd\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.464194 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-config-data\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.464495 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-log-httpd\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.464912 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-run-httpd\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.468566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-scripts\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.468862 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-config-data\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.469423 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.470075 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.481315 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shrmm\" (UniqueName: \"kubernetes.io/projected/881da2e1-3983-4b33-b724-d252bbde9f39-kube-api-access-shrmm\") pod \"ceilometer-0\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.523000 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.617788 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc154801-3bb7-4d1b-8165-10a6c5dcea55" path="/var/lib/kubelet/pods/fc154801-3bb7-4d1b-8165-10a6c5dcea55/volumes" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.872799 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1b04fc39-eb66-4cee-a14f-8162314e456f","Type":"ContainerDied","Data":"bf238ede63bb0896c0265cc9e61669387b0cd4edf20130731b8e7accdf452c2d"} Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.873164 4863 scope.go:117] "RemoveContainer" containerID="4638cb5d3dfc21e835291a1866346f8cfc7bd2242e8dbe86d9e6e40d7488200e" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.873367 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.906597 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-lflg4"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.907791 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.915557 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.925894 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lflg4"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.940540 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.967551 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.968898 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.978265 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 07:09:18 crc kubenswrapper[4863]: I1205 07:09:18.978499 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.001877 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.019534 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-xmk5k"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.020736 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.049698 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-xmk5k"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.050598 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.080952 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be1d160c-8541-4f16-9897-99aacd346223-operator-scripts\") pod \"nova-api-db-create-lflg4\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.081007 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.081027 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.081055 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbggp\" (UniqueName: \"kubernetes.io/projected/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-api-access-qbggp\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.081106 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.081150 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb648\" (UniqueName: \"kubernetes.io/projected/be1d160c-8541-4f16-9897-99aacd346223-kube-api-access-zb648\") pod \"nova-api-db-create-lflg4\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.088263 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.105341 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-a811-account-create-update-tt5wt"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.106541 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.109215 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.119063 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-7j8b6"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.121180 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.141671 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a811-account-create-update-tt5wt"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.152909 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-7j8b6"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.167117 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183425 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be1d160c-8541-4f16-9897-99aacd346223-operator-scripts\") pod \"nova-api-db-create-lflg4\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183571 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183600 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183626 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9h7k\" (UniqueName: \"kubernetes.io/projected/cf784937-80ca-4588-a6bb-5dd64800c6dd-kube-api-access-b9h7k\") pod \"nova-cell0-db-create-xmk5k\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183654 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbggp\" (UniqueName: \"kubernetes.io/projected/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-api-access-qbggp\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183685 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183719 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb648\" (UniqueName: \"kubernetes.io/projected/be1d160c-8541-4f16-9897-99aacd346223-kube-api-access-zb648\") pod \"nova-api-db-create-lflg4\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.183762 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf784937-80ca-4588-a6bb-5dd64800c6dd-operator-scripts\") pod \"nova-cell0-db-create-xmk5k\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.184262 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be1d160c-8541-4f16-9897-99aacd346223-operator-scripts\") pod \"nova-api-db-create-lflg4\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.188685 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.188911 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.199626 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.214233 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbggp\" (UniqueName: \"kubernetes.io/projected/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-api-access-qbggp\") pod \"kube-state-metrics-0\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.215186 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb648\" (UniqueName: \"kubernetes.io/projected/be1d160c-8541-4f16-9897-99aacd346223-kube-api-access-zb648\") pod \"nova-api-db-create-lflg4\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.237306 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.285792 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxz57\" (UniqueName: \"kubernetes.io/projected/9dd9d764-3ac5-4749-a947-5a61426ae7da-kube-api-access-kxz57\") pod \"nova-cell1-db-create-7j8b6\" (UID: \"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.286379 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttw9f\" (UniqueName: \"kubernetes.io/projected/bb1b0924-31b6-4b28-b187-8615b5e35545-kube-api-access-ttw9f\") pod \"nova-api-a811-account-create-update-tt5wt\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.286557 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9h7k\" (UniqueName: \"kubernetes.io/projected/cf784937-80ca-4588-a6bb-5dd64800c6dd-kube-api-access-b9h7k\") pod \"nova-cell0-db-create-xmk5k\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.286690 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb1b0924-31b6-4b28-b187-8615b5e35545-operator-scripts\") pod \"nova-api-a811-account-create-update-tt5wt\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.286884 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf784937-80ca-4588-a6bb-5dd64800c6dd-operator-scripts\") pod \"nova-cell0-db-create-xmk5k\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.287012 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd9d764-3ac5-4749-a947-5a61426ae7da-operator-scripts\") pod \"nova-cell1-db-create-7j8b6\" (UID: \"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.288394 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf784937-80ca-4588-a6bb-5dd64800c6dd-operator-scripts\") pod \"nova-cell0-db-create-xmk5k\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.299075 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-ed59-account-create-update-wgjh7"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.300374 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.307494 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.308199 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ed59-account-create-update-wgjh7"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.310331 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.311444 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9h7k\" (UniqueName: \"kubernetes.io/projected/cf784937-80ca-4588-a6bb-5dd64800c6dd-kube-api-access-b9h7k\") pod \"nova-cell0-db-create-xmk5k\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.361093 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.389017 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd9d764-3ac5-4749-a947-5a61426ae7da-operator-scripts\") pod \"nova-cell1-db-create-7j8b6\" (UID: \"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.389108 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxz57\" (UniqueName: \"kubernetes.io/projected/9dd9d764-3ac5-4749-a947-5a61426ae7da-kube-api-access-kxz57\") pod \"nova-cell1-db-create-7j8b6\" (UID: \"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.389131 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttw9f\" (UniqueName: \"kubernetes.io/projected/bb1b0924-31b6-4b28-b187-8615b5e35545-kube-api-access-ttw9f\") pod \"nova-api-a811-account-create-update-tt5wt\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.389189 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb1b0924-31b6-4b28-b187-8615b5e35545-operator-scripts\") pod \"nova-api-a811-account-create-update-tt5wt\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.390334 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb1b0924-31b6-4b28-b187-8615b5e35545-operator-scripts\") pod \"nova-api-a811-account-create-update-tt5wt\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.391005 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd9d764-3ac5-4749-a947-5a61426ae7da-operator-scripts\") pod \"nova-cell1-db-create-7j8b6\" (UID: 
\"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.409793 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttw9f\" (UniqueName: \"kubernetes.io/projected/bb1b0924-31b6-4b28-b187-8615b5e35545-kube-api-access-ttw9f\") pod \"nova-api-a811-account-create-update-tt5wt\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.413924 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxz57\" (UniqueName: \"kubernetes.io/projected/9dd9d764-3ac5-4749-a947-5a61426ae7da-kube-api-access-kxz57\") pod \"nova-cell1-db-create-7j8b6\" (UID: \"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.455003 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.474398 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.490563 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-operator-scripts\") pod \"nova-cell0-ed59-account-create-update-wgjh7\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.490699 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtfkq\" (UniqueName: \"kubernetes.io/projected/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-kube-api-access-qtfkq\") pod \"nova-cell0-ed59-account-create-update-wgjh7\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.510802 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-3b11-account-create-update-st5zs"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.512284 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.518661 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.534537 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3b11-account-create-update-st5zs"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.592248 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtfkq\" (UniqueName: \"kubernetes.io/projected/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-kube-api-access-qtfkq\") pod \"nova-cell0-ed59-account-create-update-wgjh7\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.592370 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-operator-scripts\") pod \"nova-cell0-ed59-account-create-update-wgjh7\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.593219 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-operator-scripts\") pod \"nova-cell0-ed59-account-create-update-wgjh7\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.614753 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtfkq\" (UniqueName: \"kubernetes.io/projected/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-kube-api-access-qtfkq\") pod \"nova-cell0-ed59-account-create-update-wgjh7\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.630003 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.693680 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03722336-ca3d-42e4-95d8-f9fd1c092124-operator-scripts\") pod \"nova-cell1-3b11-account-create-update-st5zs\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.693740 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7xzr\" (UniqueName: \"kubernetes.io/projected/03722336-ca3d-42e4-95d8-f9fd1c092124-kube-api-access-q7xzr\") pod \"nova-cell1-3b11-account-create-update-st5zs\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.795239 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03722336-ca3d-42e4-95d8-f9fd1c092124-operator-scripts\") pod \"nova-cell1-3b11-account-create-update-st5zs\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.795319 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7xzr\" (UniqueName: \"kubernetes.io/projected/03722336-ca3d-42e4-95d8-f9fd1c092124-kube-api-access-q7xzr\") pod \"nova-cell1-3b11-account-create-update-st5zs\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.796441 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03722336-ca3d-42e4-95d8-f9fd1c092124-operator-scripts\") pod \"nova-cell1-3b11-account-create-update-st5zs\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.819835 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7xzr\" (UniqueName: \"kubernetes.io/projected/03722336-ca3d-42e4-95d8-f9fd1c092124-kube-api-access-q7xzr\") pod \"nova-cell1-3b11-account-create-update-st5zs\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.923853 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerStarted","Data":"237a26fe0783f184d05abaadc2eaf526ef527fe2183d18c68b78a06cc010366f"} Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.924447 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.969451 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.973775 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:19 crc kubenswrapper[4863]: I1205 07:09:19.992239 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lflg4"] Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.004128 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-xmk5k"] Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.190222 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a811-account-create-update-tt5wt"] Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.204449 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-7j8b6"] Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.416953 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ed59-account-create-update-wgjh7"] Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.537840 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3b11-account-create-update-st5zs"] Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.630136 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b04fc39-eb66-4cee-a14f-8162314e456f" path="/var/lib/kubelet/pods/1b04fc39-eb66-4cee-a14f-8162314e456f/volumes" Dec 05 07:09:20 crc kubenswrapper[4863]: E1205 07:09:20.652685 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb832ec47_54da_4553_890f_0a28c51c170a.slice/crio-679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc154801_3bb7_4d1b_8165_10a6c5dcea55.slice/crio-c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27\": RecentStats: unable to find data in memory cache]" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.766160 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932308 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data-custom\") pod \"b832ec47-54da-4553-890f-0a28c51c170a\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932352 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b832ec47-54da-4553-890f-0a28c51c170a-etc-machine-id\") pod \"b832ec47-54da-4553-890f-0a28c51c170a\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932433 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-combined-ca-bundle\") pod \"b832ec47-54da-4553-890f-0a28c51c170a\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932485 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data\") pod \"b832ec47-54da-4553-890f-0a28c51c170a\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932520 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-scripts\") pod \"b832ec47-54da-4553-890f-0a28c51c170a\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932553 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b832ec47-54da-4553-890f-0a28c51c170a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b832ec47-54da-4553-890f-0a28c51c170a" (UID: "b832ec47-54da-4553-890f-0a28c51c170a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932592 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b832ec47-54da-4553-890f-0a28c51c170a-logs\") pod \"b832ec47-54da-4553-890f-0a28c51c170a\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932619 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhc9n\" (UniqueName: \"kubernetes.io/projected/b832ec47-54da-4553-890f-0a28c51c170a-kube-api-access-jhc9n\") pod \"b832ec47-54da-4553-890f-0a28c51c170a\" (UID: \"b832ec47-54da-4553-890f-0a28c51c170a\") " Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.932955 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b832ec47-54da-4553-890f-0a28c51c170a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.937974 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b832ec47-54da-4553-890f-0a28c51c170a-logs" (OuterVolumeSpecName: "logs") pod "b832ec47-54da-4553-890f-0a28c51c170a" (UID: "b832ec47-54da-4553-890f-0a28c51c170a"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.940268 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b832ec47-54da-4553-890f-0a28c51c170a-kube-api-access-jhc9n" (OuterVolumeSpecName: "kube-api-access-jhc9n") pod "b832ec47-54da-4553-890f-0a28c51c170a" (UID: "b832ec47-54da-4553-890f-0a28c51c170a"). InnerVolumeSpecName "kube-api-access-jhc9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.940572 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-scripts" (OuterVolumeSpecName: "scripts") pod "b832ec47-54da-4553-890f-0a28c51c170a" (UID: "b832ec47-54da-4553-890f-0a28c51c170a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.942552 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b832ec47-54da-4553-890f-0a28c51c170a" (UID: "b832ec47-54da-4553-890f-0a28c51c170a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.986839 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7j8b6" event={"ID":"9dd9d764-3ac5-4749-a947-5a61426ae7da","Type":"ContainerStarted","Data":"7ea9c5e09c184be80499aec3409d4672489370d0b2ae3c968323ecb809efe5f0"} Dec 05 07:09:20 crc kubenswrapper[4863]: I1205 07:09:20.986888 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7j8b6" event={"ID":"9dd9d764-3ac5-4749-a947-5a61426ae7da","Type":"ContainerStarted","Data":"939da033e58fc38e7944be367884ec58437bff2237899cafeff68c42957a4053"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.012628 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b832ec47-54da-4553-890f-0a28c51c170a" (UID: "b832ec47-54da-4553-890f-0a28c51c170a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.012775 4863 generic.go:334] "Generic (PLEG): container finished" podID="cf784937-80ca-4588-a6bb-5dd64800c6dd" containerID="d39e60280af797213ff567444e2ac7730357b8fbaed6b124edf76d8004e2f874" exitCode=0 Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.012895 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-xmk5k" event={"ID":"cf784937-80ca-4588-a6bb-5dd64800c6dd","Type":"ContainerDied","Data":"d39e60280af797213ff567444e2ac7730357b8fbaed6b124edf76d8004e2f874"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.012927 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-xmk5k" event={"ID":"cf784937-80ca-4588-a6bb-5dd64800c6dd","Type":"ContainerStarted","Data":"f7c1b03590df50de7ae32156e40352bd8eadf9f0c0195d7e8231b410b09af081"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.016858 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-7j8b6" podStartSLOduration=2.016840712 podStartE2EDuration="2.016840712s" podCreationTimestamp="2025-12-05 07:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:21.002061024 +0000 UTC m=+1388.728058074" watchObservedRunningTime="2025-12-05 07:09:21.016840712 +0000 UTC m=+1388.742837752" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.018874 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" event={"ID":"03722336-ca3d-42e4-95d8-f9fd1c092124","Type":"ContainerStarted","Data":"61eb40786c6b00d03ed8b78744de2b2ad67ad6d600f5434465a0576c04fb66fc"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.018908 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" event={"ID":"03722336-ca3d-42e4-95d8-f9fd1c092124","Type":"ContainerStarted","Data":"fec844507d1539e511ac8df7cfe36f78dede67354584a234b72affe3a83e3ec1"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.028641 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a811-account-create-update-tt5wt" event={"ID":"bb1b0924-31b6-4b28-b187-8615b5e35545","Type":"ContainerStarted","Data":"fdb429315e2084e65170204a91111fd3a4d74c3f6f5bd180be69d6d333c44d12"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.028683 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a811-account-create-update-tt5wt" event={"ID":"bb1b0924-31b6-4b28-b187-8615b5e35545","Type":"ContainerStarted","Data":"0f8fe50921f8514d4e50d1257538f64fce73c7151144678322dc4bc769fb86b8"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.031881 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerStarted","Data":"ba2040c3ada0dcc55522e0214c772509434380f6d46ec249d9a153dec7052a1c"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.038876 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0765ebea-20ed-4ada-8031-3871a35e5f11","Type":"ContainerStarted","Data":"677ea7e1d8171770713d9ce886e2267dfafbdf296ee2b1383fbe1ffa40227dd7"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.039160 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/b832ec47-54da-4553-890f-0a28c51c170a-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.039199 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhc9n\" (UniqueName: \"kubernetes.io/projected/b832ec47-54da-4553-890f-0a28c51c170a-kube-api-access-jhc9n\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.039217 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.039229 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.039240 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.047689 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" event={"ID":"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76","Type":"ContainerStarted","Data":"583ad8ddcdacbce532333b63f917dee58bcffb2149da32cbf850a4a46e8b2b4a"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.047734 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" event={"ID":"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76","Type":"ContainerStarted","Data":"a347dd6423fd529abf7659b3e1d84356b01b62e47968dba91b45d0f044de0eae"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.064022 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data" (OuterVolumeSpecName: "config-data") pod "b832ec47-54da-4553-890f-0a28c51c170a" (UID: "b832ec47-54da-4553-890f-0a28c51c170a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.064143 4863 generic.go:334] "Generic (PLEG): container finished" podID="b832ec47-54da-4553-890f-0a28c51c170a" containerID="679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3" exitCode=137 Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.064244 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b832ec47-54da-4553-890f-0a28c51c170a","Type":"ContainerDied","Data":"679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.064278 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b832ec47-54da-4553-890f-0a28c51c170a","Type":"ContainerDied","Data":"5b2571c97fec0d9368f4f36057ac571ca47dda76ea934cbe4a34bdc853ef1e99"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.064299 4863 scope.go:117] "RemoveContainer" containerID="679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.064458 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.068155 4863 generic.go:334] "Generic (PLEG): container finished" podID="be1d160c-8541-4f16-9897-99aacd346223" containerID="7351a5d013af7e40148f906ca767b5f7a97a8357859c58e8fb82704ee3f3265f" exitCode=0 Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.069685 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lflg4" event={"ID":"be1d160c-8541-4f16-9897-99aacd346223","Type":"ContainerDied","Data":"7351a5d013af7e40148f906ca767b5f7a97a8357859c58e8fb82704ee3f3265f"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.069926 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lflg4" event={"ID":"be1d160c-8541-4f16-9897-99aacd346223","Type":"ContainerStarted","Data":"18531c3f632225df06b54c752a6df67b40ada2e68e8a23dbbf138c893743856d"} Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.088001 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" podStartSLOduration=2.087981368 podStartE2EDuration="2.087981368s" podCreationTimestamp="2025-12-05 07:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:21.054325812 +0000 UTC m=+1388.780322862" watchObservedRunningTime="2025-12-05 07:09:21.087981368 +0000 UTC m=+1388.813978408" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.099666 4863 scope.go:117] "RemoveContainer" containerID="857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.102517 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-a811-account-create-update-tt5wt" podStartSLOduration=2.102491579 podStartE2EDuration="2.102491579s" podCreationTimestamp="2025-12-05 07:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:21.076777376 +0000 UTC m=+1388.802774426" watchObservedRunningTime="2025-12-05 07:09:21.102491579 +0000 UTC m=+1388.828488619" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.130546 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" podStartSLOduration=2.13052775 podStartE2EDuration="2.13052775s" podCreationTimestamp="2025-12-05 07:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:21.093385549 +0000 UTC m=+1388.819382589" watchObservedRunningTime="2025-12-05 07:09:21.13052775 +0000 UTC m=+1388.856524790" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.140641 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b832ec47-54da-4553-890f-0a28c51c170a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.164032 4863 scope.go:117] "RemoveContainer" containerID="679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3" Dec 05 07:09:21 crc kubenswrapper[4863]: E1205 07:09:21.164414 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3\": 
container with ID starting with 679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3 not found: ID does not exist" containerID="679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.164454 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3"} err="failed to get container status \"679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3\": rpc error: code = NotFound desc = could not find container \"679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3\": container with ID starting with 679d3a3f1ffede8292935ae8ef22c03b92f7bb7a53a08f563838fb23dc6294d3 not found: ID does not exist" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.164500 4863 scope.go:117] "RemoveContainer" containerID="857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c" Dec 05 07:09:21 crc kubenswrapper[4863]: E1205 07:09:21.165287 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c\": container with ID starting with 857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c not found: ID does not exist" containerID="857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.165314 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c"} err="failed to get container status \"857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c\": rpc error: code = NotFound desc = could not find container \"857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c\": container with ID starting with 857ffee947ae89feb8ae3bc977d779c5ee4ce1849560b5fef4b92a48470bf83c not found: ID does not exist" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.188677 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.222546 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.269556 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:09:21 crc kubenswrapper[4863]: E1205 07:09:21.270674 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.270705 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api" Dec 05 07:09:21 crc kubenswrapper[4863]: E1205 07:09:21.270730 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api-log" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.270740 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api-log" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.271198 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.271242 4863 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="b832ec47-54da-4553-890f-0a28c51c170a" containerName="cinder-api-log" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.275044 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.278210 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.278790 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.278974 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.281075 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451157 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89hmm\" (UniqueName: \"kubernetes.io/projected/1a46c1ea-72b2-4dfd-a073-72f82617ce76-kube-api-access-89hmm\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451230 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a46c1ea-72b2-4dfd-a073-72f82617ce76-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451495 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-scripts\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451567 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a46c1ea-72b2-4dfd-a073-72f82617ce76-logs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451665 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451806 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data-custom\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451876 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-public-tls-certs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 
05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451908 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.451951 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.553400 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-scripts\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554438 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a46c1ea-72b2-4dfd-a073-72f82617ce76-logs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554517 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554574 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data-custom\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554604 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-public-tls-certs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554619 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554644 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554698 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89hmm\" (UniqueName: \"kubernetes.io/projected/1a46c1ea-72b2-4dfd-a073-72f82617ce76-kube-api-access-89hmm\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " 
pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554727 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a46c1ea-72b2-4dfd-a073-72f82617ce76-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.554792 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a46c1ea-72b2-4dfd-a073-72f82617ce76-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.555165 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a46c1ea-72b2-4dfd-a073-72f82617ce76-logs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.563958 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.565412 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.574506 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.575510 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-public-tls-certs\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.576124 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-scripts\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.580306 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89hmm\" (UniqueName: \"kubernetes.io/projected/1a46c1ea-72b2-4dfd-a073-72f82617ce76-kube-api-access-89hmm\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: I1205 07:09:21.580615 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data-custom\") pod \"cinder-api-0\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " pod="openstack/cinder-api-0" Dec 05 07:09:21 crc kubenswrapper[4863]: 
I1205 07:09:21.806317 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.103612 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerStarted","Data":"7e0226cdb89f1865a3ba27a5a4030c13b5306021fa3f10d070ae1785ea4e1460"} Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.103884 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerStarted","Data":"a818b568d348af2b6c772e6604272b3c1b40133c3a45624d57e21fb880d12574"} Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.107137 4863 generic.go:334] "Generic (PLEG): container finished" podID="99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76" containerID="583ad8ddcdacbce532333b63f917dee58bcffb2149da32cbf850a4a46e8b2b4a" exitCode=0 Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.107196 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" event={"ID":"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76","Type":"ContainerDied","Data":"583ad8ddcdacbce532333b63f917dee58bcffb2149da32cbf850a4a46e8b2b4a"} Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.112910 4863 generic.go:334] "Generic (PLEG): container finished" podID="03722336-ca3d-42e4-95d8-f9fd1c092124" containerID="61eb40786c6b00d03ed8b78744de2b2ad67ad6d600f5434465a0576c04fb66fc" exitCode=0 Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.112975 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" event={"ID":"03722336-ca3d-42e4-95d8-f9fd1c092124","Type":"ContainerDied","Data":"61eb40786c6b00d03ed8b78744de2b2ad67ad6d600f5434465a0576c04fb66fc"} Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.117713 4863 generic.go:334] "Generic (PLEG): container finished" podID="bb1b0924-31b6-4b28-b187-8615b5e35545" containerID="fdb429315e2084e65170204a91111fd3a4d74c3f6f5bd180be69d6d333c44d12" exitCode=0 Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.117867 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a811-account-create-update-tt5wt" event={"ID":"bb1b0924-31b6-4b28-b187-8615b5e35545","Type":"ContainerDied","Data":"fdb429315e2084e65170204a91111fd3a4d74c3f6f5bd180be69d6d333c44d12"} Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.129015 4863 generic.go:334] "Generic (PLEG): container finished" podID="9dd9d764-3ac5-4749-a947-5a61426ae7da" containerID="7ea9c5e09c184be80499aec3409d4672489370d0b2ae3c968323ecb809efe5f0" exitCode=0 Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.129183 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7j8b6" event={"ID":"9dd9d764-3ac5-4749-a947-5a61426ae7da","Type":"ContainerDied","Data":"7ea9c5e09c184be80499aec3409d4672489370d0b2ae3c968323ecb809efe5f0"} Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.132186 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0765ebea-20ed-4ada-8031-3871a35e5f11","Type":"ContainerStarted","Data":"f6d81c1a4f657c6fae55a4c625c5b5be2de6ad49debaa852a7501310e581a784"} Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.132225 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 07:09:22 crc kubenswrapper[4863]: 
I1205 07:09:22.202301 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.290560251 podStartE2EDuration="4.202275673s" podCreationTimestamp="2025-12-05 07:09:18 +0000 UTC" firstStartedPulling="2025-12-05 07:09:19.927389149 +0000 UTC m=+1387.653386189" lastFinishedPulling="2025-12-05 07:09:20.839104571 +0000 UTC m=+1388.565101611" observedRunningTime="2025-12-05 07:09:22.172523042 +0000 UTC m=+1389.898520082" watchObservedRunningTime="2025-12-05 07:09:22.202275673 +0000 UTC m=+1389.928272713" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.372567 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.408813 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.580111 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf784937-80ca-4588-a6bb-5dd64800c6dd-operator-scripts\") pod \"cf784937-80ca-4588-a6bb-5dd64800c6dd\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.580196 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9h7k\" (UniqueName: \"kubernetes.io/projected/cf784937-80ca-4588-a6bb-5dd64800c6dd-kube-api-access-b9h7k\") pod \"cf784937-80ca-4588-a6bb-5dd64800c6dd\" (UID: \"cf784937-80ca-4588-a6bb-5dd64800c6dd\") " Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.580809 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf784937-80ca-4588-a6bb-5dd64800c6dd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cf784937-80ca-4588-a6bb-5dd64800c6dd" (UID: "cf784937-80ca-4588-a6bb-5dd64800c6dd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.586581 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf784937-80ca-4588-a6bb-5dd64800c6dd-kube-api-access-b9h7k" (OuterVolumeSpecName: "kube-api-access-b9h7k") pod "cf784937-80ca-4588-a6bb-5dd64800c6dd" (UID: "cf784937-80ca-4588-a6bb-5dd64800c6dd"). InnerVolumeSpecName "kube-api-access-b9h7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.657302 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b832ec47-54da-4553-890f-0a28c51c170a" path="/var/lib/kubelet/pods/b832ec47-54da-4553-890f-0a28c51c170a/volumes" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.683212 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf784937-80ca-4588-a6bb-5dd64800c6dd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.683305 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9h7k\" (UniqueName: \"kubernetes.io/projected/cf784937-80ca-4588-a6bb-5dd64800c6dd-kube-api-access-b9h7k\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.707635 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.886517 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be1d160c-8541-4f16-9897-99aacd346223-operator-scripts\") pod \"be1d160c-8541-4f16-9897-99aacd346223\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.886618 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb648\" (UniqueName: \"kubernetes.io/projected/be1d160c-8541-4f16-9897-99aacd346223-kube-api-access-zb648\") pod \"be1d160c-8541-4f16-9897-99aacd346223\" (UID: \"be1d160c-8541-4f16-9897-99aacd346223\") " Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.887171 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1d160c-8541-4f16-9897-99aacd346223-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "be1d160c-8541-4f16-9897-99aacd346223" (UID: "be1d160c-8541-4f16-9897-99aacd346223"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.893660 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be1d160c-8541-4f16-9897-99aacd346223-kube-api-access-zb648" (OuterVolumeSpecName: "kube-api-access-zb648") pod "be1d160c-8541-4f16-9897-99aacd346223" (UID: "be1d160c-8541-4f16-9897-99aacd346223"). InnerVolumeSpecName "kube-api-access-zb648". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.988400 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be1d160c-8541-4f16-9897-99aacd346223-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:22 crc kubenswrapper[4863]: I1205 07:09:22.988440 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb648\" (UniqueName: \"kubernetes.io/projected/be1d160c-8541-4f16-9897-99aacd346223-kube-api-access-zb648\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.144304 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-xmk5k" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.144295 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-xmk5k" event={"ID":"cf784937-80ca-4588-a6bb-5dd64800c6dd","Type":"ContainerDied","Data":"f7c1b03590df50de7ae32156e40352bd8eadf9f0c0195d7e8231b410b09af081"} Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.144425 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7c1b03590df50de7ae32156e40352bd8eadf9f0c0195d7e8231b410b09af081" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.145610 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lflg4" event={"ID":"be1d160c-8541-4f16-9897-99aacd346223","Type":"ContainerDied","Data":"18531c3f632225df06b54c752a6df67b40ada2e68e8a23dbbf138c893743856d"} Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.145640 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18531c3f632225df06b54c752a6df67b40ada2e68e8a23dbbf138c893743856d" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.145728 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lflg4" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.148528 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a46c1ea-72b2-4dfd-a073-72f82617ce76","Type":"ContainerStarted","Data":"549aae67303b6a18242583dd284cfc9f5114833b70853f3457867a22770c53bd"} Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.689328 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.702639 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.723178 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.748719 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814082 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttw9f\" (UniqueName: \"kubernetes.io/projected/bb1b0924-31b6-4b28-b187-8615b5e35545-kube-api-access-ttw9f\") pod \"bb1b0924-31b6-4b28-b187-8615b5e35545\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814251 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb1b0924-31b6-4b28-b187-8615b5e35545-operator-scripts\") pod \"bb1b0924-31b6-4b28-b187-8615b5e35545\" (UID: \"bb1b0924-31b6-4b28-b187-8615b5e35545\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814302 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd9d764-3ac5-4749-a947-5a61426ae7da-operator-scripts\") pod \"9dd9d764-3ac5-4749-a947-5a61426ae7da\" (UID: \"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814325 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxz57\" (UniqueName: \"kubernetes.io/projected/9dd9d764-3ac5-4749-a947-5a61426ae7da-kube-api-access-kxz57\") pod \"9dd9d764-3ac5-4749-a947-5a61426ae7da\" (UID: \"9dd9d764-3ac5-4749-a947-5a61426ae7da\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814346 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03722336-ca3d-42e4-95d8-f9fd1c092124-operator-scripts\") pod \"03722336-ca3d-42e4-95d8-f9fd1c092124\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814390 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7xzr\" (UniqueName: \"kubernetes.io/projected/03722336-ca3d-42e4-95d8-f9fd1c092124-kube-api-access-q7xzr\") pod \"03722336-ca3d-42e4-95d8-f9fd1c092124\" (UID: \"03722336-ca3d-42e4-95d8-f9fd1c092124\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814777 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb1b0924-31b6-4b28-b187-8615b5e35545-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bb1b0924-31b6-4b28-b187-8615b5e35545" (UID: "bb1b0924-31b6-4b28-b187-8615b5e35545"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.814933 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dd9d764-3ac5-4749-a947-5a61426ae7da-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9dd9d764-3ac5-4749-a947-5a61426ae7da" (UID: "9dd9d764-3ac5-4749-a947-5a61426ae7da"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.815374 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03722336-ca3d-42e4-95d8-f9fd1c092124-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "03722336-ca3d-42e4-95d8-f9fd1c092124" (UID: "03722336-ca3d-42e4-95d8-f9fd1c092124"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.820861 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03722336-ca3d-42e4-95d8-f9fd1c092124-kube-api-access-q7xzr" (OuterVolumeSpecName: "kube-api-access-q7xzr") pod "03722336-ca3d-42e4-95d8-f9fd1c092124" (UID: "03722336-ca3d-42e4-95d8-f9fd1c092124"). InnerVolumeSpecName "kube-api-access-q7xzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.821070 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb1b0924-31b6-4b28-b187-8615b5e35545-kube-api-access-ttw9f" (OuterVolumeSpecName: "kube-api-access-ttw9f") pod "bb1b0924-31b6-4b28-b187-8615b5e35545" (UID: "bb1b0924-31b6-4b28-b187-8615b5e35545"). InnerVolumeSpecName "kube-api-access-ttw9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.828862 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dd9d764-3ac5-4749-a947-5a61426ae7da-kube-api-access-kxz57" (OuterVolumeSpecName: "kube-api-access-kxz57") pod "9dd9d764-3ac5-4749-a947-5a61426ae7da" (UID: "9dd9d764-3ac5-4749-a947-5a61426ae7da"). InnerVolumeSpecName "kube-api-access-kxz57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.915777 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtfkq\" (UniqueName: \"kubernetes.io/projected/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-kube-api-access-qtfkq\") pod \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.915908 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-operator-scripts\") pod \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\" (UID: \"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76\") " Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.916302 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttw9f\" (UniqueName: \"kubernetes.io/projected/bb1b0924-31b6-4b28-b187-8615b5e35545-kube-api-access-ttw9f\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.916317 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb1b0924-31b6-4b28-b187-8615b5e35545-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.916327 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd9d764-3ac5-4749-a947-5a61426ae7da-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.916335 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxz57\" (UniqueName: \"kubernetes.io/projected/9dd9d764-3ac5-4749-a947-5a61426ae7da-kube-api-access-kxz57\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.916343 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03722336-ca3d-42e4-95d8-f9fd1c092124-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 
05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.916352 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7xzr\" (UniqueName: \"kubernetes.io/projected/03722336-ca3d-42e4-95d8-f9fd1c092124-kube-api-access-q7xzr\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.916776 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76" (UID: "99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:09:23 crc kubenswrapper[4863]: I1205 07:09:23.919290 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-kube-api-access-qtfkq" (OuterVolumeSpecName: "kube-api-access-qtfkq") pod "99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76" (UID: "99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76"). InnerVolumeSpecName "kube-api-access-qtfkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.019058 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtfkq\" (UniqueName: \"kubernetes.io/projected/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-kube-api-access-qtfkq\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.019096 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.165928 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a46c1ea-72b2-4dfd-a073-72f82617ce76","Type":"ContainerStarted","Data":"020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b"} Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.168181 4863 generic.go:334] "Generic (PLEG): container finished" podID="881da2e1-3983-4b33-b724-d252bbde9f39" containerID="5b5b447d7ac85457e6d9869110913db6a4c08d245af0f3f2e634952291917028" exitCode=1 Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.168258 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerDied","Data":"5b5b447d7ac85457e6d9869110913db6a4c08d245af0f3f2e634952291917028"} Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.168419 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="sg-core" containerID="cri-o://7e0226cdb89f1865a3ba27a5a4030c13b5306021fa3f10d070ae1785ea4e1460" gracePeriod=30 Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.168410 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-central-agent" containerID="cri-o://ba2040c3ada0dcc55522e0214c772509434380f6d46ec249d9a153dec7052a1c" gracePeriod=30 Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.168553 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-notification-agent" 
containerID="cri-o://a818b568d348af2b6c772e6604272b3c1b40133c3a45624d57e21fb880d12574" gracePeriod=30 Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.171800 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7j8b6" event={"ID":"9dd9d764-3ac5-4749-a947-5a61426ae7da","Type":"ContainerDied","Data":"939da033e58fc38e7944be367884ec58437bff2237899cafeff68c42957a4053"} Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.171842 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="939da033e58fc38e7944be367884ec58437bff2237899cafeff68c42957a4053" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.171924 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7j8b6" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.176433 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.176518 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ed59-account-create-update-wgjh7" event={"ID":"99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76","Type":"ContainerDied","Data":"a347dd6423fd529abf7659b3e1d84356b01b62e47968dba91b45d0f044de0eae"} Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.176567 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a347dd6423fd529abf7659b3e1d84356b01b62e47968dba91b45d0f044de0eae" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.182259 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" event={"ID":"03722336-ca3d-42e4-95d8-f9fd1c092124","Type":"ContainerDied","Data":"fec844507d1539e511ac8df7cfe36f78dede67354584a234b72affe3a83e3ec1"} Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.182300 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fec844507d1539e511ac8df7cfe36f78dede67354584a234b72affe3a83e3ec1" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.182380 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3b11-account-create-update-st5zs" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.206876 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a811-account-create-update-tt5wt" event={"ID":"bb1b0924-31b6-4b28-b187-8615b5e35545","Type":"ContainerDied","Data":"0f8fe50921f8514d4e50d1257538f64fce73c7151144678322dc4bc769fb86b8"} Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.206927 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f8fe50921f8514d4e50d1257538f64fce73c7151144678322dc4bc769fb86b8" Dec 05 07:09:24 crc kubenswrapper[4863]: I1205 07:09:24.206992 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-a811-account-create-update-tt5wt" Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.219226 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a46c1ea-72b2-4dfd-a073-72f82617ce76","Type":"ContainerStarted","Data":"8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd"} Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.219851 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.223707 4863 generic.go:334] "Generic (PLEG): container finished" podID="881da2e1-3983-4b33-b724-d252bbde9f39" containerID="7e0226cdb89f1865a3ba27a5a4030c13b5306021fa3f10d070ae1785ea4e1460" exitCode=2 Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.223739 4863 generic.go:334] "Generic (PLEG): container finished" podID="881da2e1-3983-4b33-b724-d252bbde9f39" containerID="a818b568d348af2b6c772e6604272b3c1b40133c3a45624d57e21fb880d12574" exitCode=0 Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.223758 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerDied","Data":"7e0226cdb89f1865a3ba27a5a4030c13b5306021fa3f10d070ae1785ea4e1460"} Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.223799 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerDied","Data":"a818b568d348af2b6c772e6604272b3c1b40133c3a45624d57e21fb880d12574"} Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.246303 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.246282121 podStartE2EDuration="4.246282121s" podCreationTimestamp="2025-12-05 07:09:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:25.243722079 +0000 UTC m=+1392.969719129" watchObservedRunningTime="2025-12-05 07:09:25.246282121 +0000 UTC m=+1392.972279161" Dec 05 07:09:25 crc kubenswrapper[4863]: I1205 07:09:25.892753 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:09:26 crc kubenswrapper[4863]: I1205 07:09:26.235445 4863 generic.go:334] "Generic (PLEG): container finished" podID="881da2e1-3983-4b33-b724-d252bbde9f39" containerID="ba2040c3ada0dcc55522e0214c772509434380f6d46ec249d9a153dec7052a1c" exitCode=0 Dec 05 07:09:26 crc kubenswrapper[4863]: I1205 07:09:26.235529 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerDied","Data":"ba2040c3ada0dcc55522e0214c772509434380f6d46ec249d9a153dec7052a1c"} Dec 05 07:09:26 crc kubenswrapper[4863]: I1205 07:09:26.977317 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:09:26 crc kubenswrapper[4863]: I1205 07:09:26.977650 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-log" containerID="cri-o://32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773" gracePeriod=30 Dec 05 07:09:26 crc kubenswrapper[4863]: I1205 07:09:26.977740 4863 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-httpd" containerID="cri-o://085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9" gracePeriod=30 Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.298340 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"881da2e1-3983-4b33-b724-d252bbde9f39","Type":"ContainerDied","Data":"237a26fe0783f184d05abaadc2eaf526ef527fe2183d18c68b78a06cc010366f"} Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.298690 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="237a26fe0783f184d05abaadc2eaf526ef527fe2183d18c68b78a06cc010366f" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.301232 4863 generic.go:334] "Generic (PLEG): container finished" podID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerID="32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773" exitCode=143 Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.301268 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ff270182-7d2d-4b4c-b1a5-690b07bda3d2","Type":"ContainerDied","Data":"32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773"} Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.308254 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.396898 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-sg-core-conf-yaml\") pod \"881da2e1-3983-4b33-b724-d252bbde9f39\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.396950 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-combined-ca-bundle\") pod \"881da2e1-3983-4b33-b724-d252bbde9f39\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.397016 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-config-data\") pod \"881da2e1-3983-4b33-b724-d252bbde9f39\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.397081 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-log-httpd\") pod \"881da2e1-3983-4b33-b724-d252bbde9f39\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.397128 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-run-httpd\") pod \"881da2e1-3983-4b33-b724-d252bbde9f39\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.397150 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shrmm\" (UniqueName: 
\"kubernetes.io/projected/881da2e1-3983-4b33-b724-d252bbde9f39-kube-api-access-shrmm\") pod \"881da2e1-3983-4b33-b724-d252bbde9f39\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.397167 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-scripts\") pod \"881da2e1-3983-4b33-b724-d252bbde9f39\" (UID: \"881da2e1-3983-4b33-b724-d252bbde9f39\") " Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.397615 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "881da2e1-3983-4b33-b724-d252bbde9f39" (UID: "881da2e1-3983-4b33-b724-d252bbde9f39"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.397685 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "881da2e1-3983-4b33-b724-d252bbde9f39" (UID: "881da2e1-3983-4b33-b724-d252bbde9f39"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.398364 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.398393 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/881da2e1-3983-4b33-b724-d252bbde9f39-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.402881 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/881da2e1-3983-4b33-b724-d252bbde9f39-kube-api-access-shrmm" (OuterVolumeSpecName: "kube-api-access-shrmm") pod "881da2e1-3983-4b33-b724-d252bbde9f39" (UID: "881da2e1-3983-4b33-b724-d252bbde9f39"). InnerVolumeSpecName "kube-api-access-shrmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.404709 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-scripts" (OuterVolumeSpecName: "scripts") pod "881da2e1-3983-4b33-b724-d252bbde9f39" (UID: "881da2e1-3983-4b33-b724-d252bbde9f39"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.423628 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "881da2e1-3983-4b33-b724-d252bbde9f39" (UID: "881da2e1-3983-4b33-b724-d252bbde9f39"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.476895 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "881da2e1-3983-4b33-b724-d252bbde9f39" (UID: "881da2e1-3983-4b33-b724-d252bbde9f39"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.499711 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shrmm\" (UniqueName: \"kubernetes.io/projected/881da2e1-3983-4b33-b724-d252bbde9f39-kube-api-access-shrmm\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.499748 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.499759 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.499770 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.502809 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-config-data" (OuterVolumeSpecName: "config-data") pod "881da2e1-3983-4b33-b724-d252bbde9f39" (UID: "881da2e1-3983-4b33-b724-d252bbde9f39"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:27 crc kubenswrapper[4863]: I1205 07:09:27.601953 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/881da2e1-3983-4b33-b724-d252bbde9f39-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.001358 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.002808 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-log" containerID="cri-o://d85bbeea8571b4f306cd156e8764d388f33336530169ed2a41e7f7469f038d9e" gracePeriod=30 Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.002912 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-httpd" containerID="cri-o://f6e482cd6530ea79ed8f4591c10bb746bdc47ebe9575fc8d959a6297df953db2" gracePeriod=30 Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.312287 4863 generic.go:334] "Generic (PLEG): container finished" podID="358077aa-1f8a-4496-8546-366bc758746d" containerID="d85bbeea8571b4f306cd156e8764d388f33336530169ed2a41e7f7469f038d9e" exitCode=143 Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.312376 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.312409 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"358077aa-1f8a-4496-8546-366bc758746d","Type":"ContainerDied","Data":"d85bbeea8571b4f306cd156e8764d388f33336530169ed2a41e7f7469f038d9e"} Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.353544 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.358248 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.370780 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371188 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb1b0924-31b6-4b28-b187-8615b5e35545" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371209 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb1b0924-31b6-4b28-b187-8615b5e35545" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371225 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="sg-core" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371233 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="sg-core" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371252 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be1d160c-8541-4f16-9897-99aacd346223" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371260 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="be1d160c-8541-4f16-9897-99aacd346223" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371283 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-central-agent" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371292 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-central-agent" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371305 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dd9d764-3ac5-4749-a947-5a61426ae7da" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371312 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dd9d764-3ac5-4749-a947-5a61426ae7da" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371325 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-notification-agent" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371332 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-notification-agent" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371345 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 
07:09:28.371352 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371370 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf784937-80ca-4588-a6bb-5dd64800c6dd" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371378 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf784937-80ca-4588-a6bb-5dd64800c6dd" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371391 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03722336-ca3d-42e4-95d8-f9fd1c092124" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371399 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="03722336-ca3d-42e4-95d8-f9fd1c092124" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: E1205 07:09:28.371412 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="proxy-httpd" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371420 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="proxy-httpd" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371632 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="03722336-ca3d-42e4-95d8-f9fd1c092124" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371649 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-central-agent" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371663 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dd9d764-3ac5-4749-a947-5a61426ae7da" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371676 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="ceilometer-notification-agent" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371695 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb1b0924-31b6-4b28-b187-8615b5e35545" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371707 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf784937-80ca-4588-a6bb-5dd64800c6dd" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371721 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="sg-core" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371736 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76" containerName="mariadb-account-create-update" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371755 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" containerName="proxy-httpd" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.371765 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="be1d160c-8541-4f16-9897-99aacd346223" containerName="mariadb-database-create" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 
07:09:28.373756 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.384965 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.385828 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.385859 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.385859 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441103 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-log-httpd\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441183 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-scripts\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441222 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwpvz\" (UniqueName: \"kubernetes.io/projected/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-kube-api-access-pwpvz\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441246 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441424 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441460 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-run-httpd\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441516 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.441593 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-config-data\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-log-httpd\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542700 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-scripts\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542732 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542759 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwpvz\" (UniqueName: \"kubernetes.io/projected/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-kube-api-access-pwpvz\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542843 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-run-httpd\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542864 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542894 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.542951 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-config-data\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.543153 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-log-httpd\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.543384 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-run-httpd\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.547944 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.548465 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.548596 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.549767 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-config-data\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.561790 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-scripts\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.563435 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwpvz\" (UniqueName: \"kubernetes.io/projected/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-kube-api-access-pwpvz\") pod \"ceilometer-0\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " pod="openstack/ceilometer-0" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.613755 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="881da2e1-3983-4b33-b724-d252bbde9f39" path="/var/lib/kubelet/pods/881da2e1-3983-4b33-b724-d252bbde9f39/volumes" Dec 05 07:09:28 crc kubenswrapper[4863]: I1205 07:09:28.776089 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.043703 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:29 crc kubenswrapper[4863]: W1205 07:09:29.049813 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ecef997_f1fc_450e_b2ca_afc5281f3ff7.slice/crio-d21f2c4ae842b3ea3fae84594591774a7dbe0624c828cc5c6a6bb6f32c46bd50 WatchSource:0}: Error finding container d21f2c4ae842b3ea3fae84594591774a7dbe0624c828cc5c6a6bb6f32c46bd50: Status 404 returned error can't find the container with id d21f2c4ae842b3ea3fae84594591774a7dbe0624c828cc5c6a6bb6f32c46bd50 Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.291034 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.320183 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.324159 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerStarted","Data":"d21f2c4ae842b3ea3fae84594591774a7dbe0624c828cc5c6a6bb6f32c46bd50"} Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.663899 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-txgnc"] Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.665347 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.668011 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-96tlz" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.670874 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.671036 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.725694 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-txgnc"] Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.729951 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-config-data\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.730061 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-scripts\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.730124 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zrgw\" (UniqueName: \"kubernetes.io/projected/b7e691e7-81f8-4d74-8d20-f679070e3321-kube-api-access-7zrgw\") pod 
\"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.730148 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.831585 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-scripts\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.831921 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zrgw\" (UniqueName: \"kubernetes.io/projected/b7e691e7-81f8-4d74-8d20-f679070e3321-kube-api-access-7zrgw\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.831949 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.832009 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-config-data\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.836680 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-scripts\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.846512 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-config-data\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.848721 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:29 crc kubenswrapper[4863]: I1205 07:09:29.853053 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zrgw\" (UniqueName: 
\"kubernetes.io/projected/b7e691e7-81f8-4d74-8d20-f679070e3321-kube-api-access-7zrgw\") pod \"nova-cell0-conductor-db-sync-txgnc\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:30 crc kubenswrapper[4863]: I1205 07:09:30.094064 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:30 crc kubenswrapper[4863]: I1205 07:09:30.332857 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerStarted","Data":"a8e096227430c723e90bd2e3c81b2443facfbeb97182d3357fd2c6b82315d6f1"} Dec 05 07:09:30 crc kubenswrapper[4863]: I1205 07:09:30.575549 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-txgnc"] Dec 05 07:09:30 crc kubenswrapper[4863]: E1205 07:09:30.947557 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc154801_3bb7_4d1b_8165_10a6c5dcea55.slice/crio-c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27\": RecentStats: unable to find data in memory cache]" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.318355 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.356137 4863 generic.go:334] "Generic (PLEG): container finished" podID="358077aa-1f8a-4496-8546-366bc758746d" containerID="f6e482cd6530ea79ed8f4591c10bb746bdc47ebe9575fc8d959a6297df953db2" exitCode=0 Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.356269 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"358077aa-1f8a-4496-8546-366bc758746d","Type":"ContainerDied","Data":"f6e482cd6530ea79ed8f4591c10bb746bdc47ebe9575fc8d959a6297df953db2"} Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.359290 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-txgnc" event={"ID":"b7e691e7-81f8-4d74-8d20-f679070e3321","Type":"ContainerStarted","Data":"0987954a0f79d90012d0901ff91f4b7bcf662e96abc681cebca09d8d29ca02af"} Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.384408 4863 generic.go:334] "Generic (PLEG): container finished" podID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerID="085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9" exitCode=0 Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.384538 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ff270182-7d2d-4b4c-b1a5-690b07bda3d2","Type":"ContainerDied","Data":"085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9"} Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.384576 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ff270182-7d2d-4b4c-b1a5-690b07bda3d2","Type":"ContainerDied","Data":"0ac7f84dadec5f62f9a9a572fce63100e20a410a9b099bc6e708f0c94a75ef35"} Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.384597 4863 scope.go:117] "RemoveContainer" containerID="085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.384785 4863 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.385928 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-logs\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.385988 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clxtz\" (UniqueName: \"kubernetes.io/projected/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-kube-api-access-clxtz\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.386032 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-config-data\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.386082 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.386129 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-combined-ca-bundle\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.386164 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-public-tls-certs\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.386959 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-logs" (OuterVolumeSpecName: "logs") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.397304 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerStarted","Data":"d9db50b5f43b888d7ef87ea8725b346f760b28e1a36a7d4accd3273cc163ad7e"} Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.414848 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.419716 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-kube-api-access-clxtz" (OuterVolumeSpecName: "kube-api-access-clxtz") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "kube-api-access-clxtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.488585 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.489300 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-scripts\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.489354 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-httpd-run\") pod \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\" (UID: \"ff270182-7d2d-4b4c-b1a5-690b07bda3d2\") " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.490712 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.492970 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.492995 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.493035 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.493043 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clxtz\" (UniqueName: \"kubernetes.io/projected/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-kube-api-access-clxtz\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.493069 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.499364 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-scripts" (OuterVolumeSpecName: "scripts") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.512267 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-config-data" (OuterVolumeSpecName: "config-data") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.515771 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff270182-7d2d-4b4c-b1a5-690b07bda3d2" (UID: "ff270182-7d2d-4b4c-b1a5-690b07bda3d2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.527622 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.594684 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.594720 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.594730 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff270182-7d2d-4b4c-b1a5-690b07bda3d2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.594740 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.609605 4863 scope.go:117] "RemoveContainer" containerID="32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.658913 4863 scope.go:117] "RemoveContainer" containerID="085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9" Dec 05 07:09:31 crc kubenswrapper[4863]: E1205 07:09:31.661567 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9\": container with ID starting with 085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9 not found: ID does not exist" containerID="085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.661619 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9"} err="failed to get container status \"085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9\": rpc error: code = NotFound desc = could not find container \"085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9\": container with ID starting with 085549ef0e1d9088df3f45ecde9b7f873db52fb6a0df859853a3b5862e0f97c9 not found: ID does not exist" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.661663 4863 scope.go:117] "RemoveContainer" containerID="32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773" Dec 05 07:09:31 crc kubenswrapper[4863]: E1205 07:09:31.662303 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773\": container with ID starting with 32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773 not found: ID does not exist" containerID="32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.662326 4863 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773"} err="failed to get container status \"32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773\": rpc error: code = NotFound desc = could not find container \"32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773\": container with ID starting with 32f1e53e093cf0f0dd999d0253fa18d2ba7ff89ca85a75ee8aab8d525f16b773 not found: ID does not exist" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.742518 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.760772 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.770679 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:09:31 crc kubenswrapper[4863]: E1205 07:09:31.771175 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-log" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.771194 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-log" Dec 05 07:09:31 crc kubenswrapper[4863]: E1205 07:09:31.771210 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-httpd" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.771217 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-httpd" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.771410 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-log" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.771441 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" containerName="glance-httpd" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.772454 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.776304 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.776621 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.780033 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.855192 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.909081 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-scripts\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.915924 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-config-data\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.916048 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-logs\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.916075 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.916128 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.916157 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l44xw\" (UniqueName: \"kubernetes.io/projected/0752f5c6-12cf-4208-b523-f970b63f1b4b-kube-api-access-l44xw\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.916212 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:31 crc kubenswrapper[4863]: I1205 07:09:31.916303 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.017753 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-config-data\") pod 
\"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.017801 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-httpd-run\") pod \"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.017836 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-combined-ca-bundle\") pod \"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.017877 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.017995 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-internal-tls-certs\") pod \"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018056 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-scripts\") pod \"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018095 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-logs\") pod \"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018167 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wbgs\" (UniqueName: \"kubernetes.io/projected/358077aa-1f8a-4496-8546-366bc758746d-kube-api-access-5wbgs\") pod \"358077aa-1f8a-4496-8546-366bc758746d\" (UID: \"358077aa-1f8a-4496-8546-366bc758746d\") " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018368 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018401 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l44xw\" (UniqueName: \"kubernetes.io/projected/0752f5c6-12cf-4208-b523-f970b63f1b4b-kube-api-access-l44xw\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018446 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018639 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018670 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-scripts\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018743 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-config-data\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018792 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-logs\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.018809 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.019019 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.021439 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-logs" (OuterVolumeSpecName: "logs") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.022135 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.023231 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.023284 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-logs\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.029623 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.030425 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.032889 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.042873 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-scripts\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.044137 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-config-data\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.045849 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l44xw\" (UniqueName: \"kubernetes.io/projected/0752f5c6-12cf-4208-b523-f970b63f1b4b-kube-api-access-l44xw\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.047320 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/358077aa-1f8a-4496-8546-366bc758746d-kube-api-access-5wbgs" (OuterVolumeSpecName: "kube-api-access-5wbgs") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "kube-api-access-5wbgs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.053671 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-scripts" (OuterVolumeSpecName: "scripts") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.054327 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.114685 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.118984 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.121200 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.122413 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.122443 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.122454 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.122463 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.122475 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.122495 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wbgs\" (UniqueName: \"kubernetes.io/projected/358077aa-1f8a-4496-8546-366bc758746d-kube-api-access-5wbgs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.122505 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/358077aa-1f8a-4496-8546-366bc758746d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.161549 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.176975 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-config-data" (OuterVolumeSpecName: "config-data") pod "358077aa-1f8a-4496-8546-366bc758746d" (UID: "358077aa-1f8a-4496-8546-366bc758746d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.225995 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.226043 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358077aa-1f8a-4496-8546-366bc758746d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.491222 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerStarted","Data":"90c605622e6cd845e63df2b1863037928c241e467bf19819bf8d260792d50bc5"} Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.544893 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"358077aa-1f8a-4496-8546-366bc758746d","Type":"ContainerDied","Data":"8bb27eb8eeb64f7d69c177abcb7d920aae7bd8b4b3b2976f3f707263aa86d34c"} Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.544959 4863 scope.go:117] "RemoveContainer" containerID="f6e482cd6530ea79ed8f4591c10bb746bdc47ebe9575fc8d959a6297df953db2" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.545179 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.617940 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff270182-7d2d-4b4c-b1a5-690b07bda3d2" path="/var/lib/kubelet/pods/ff270182-7d2d-4b4c-b1a5-690b07bda3d2/volumes" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.641686 4863 scope.go:117] "RemoveContainer" containerID="d85bbeea8571b4f306cd156e8764d388f33336530169ed2a41e7f7469f038d9e" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.713363 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.713415 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.743125 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:09:32 crc kubenswrapper[4863]: E1205 07:09:32.743642 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-httpd" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.743660 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-httpd" Dec 05 07:09:32 crc kubenswrapper[4863]: E1205 07:09:32.743680 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-log" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.743687 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-log" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.743892 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-log" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.743945 4863 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="358077aa-1f8a-4496-8546-366bc758746d" containerName="glance-httpd" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.745067 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.751696 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.752154 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.756441 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.861650 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.861738 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsvsq\" (UniqueName: \"kubernetes.io/projected/5c1e2892-03e9-4f09-84ce-0c91842108cc-kube-api-access-fsvsq\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.861780 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.861841 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.861874 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.861916 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.861939 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.862005 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.942632 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.963428 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.963669 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.963823 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsvsq\" (UniqueName: \"kubernetes.io/projected/5c1e2892-03e9-4f09-84ce-0c91842108cc-kube-api-access-fsvsq\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.963931 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.964066 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.964181 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.964292 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.964379 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.964718 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.963719 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.965619 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.970328 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.980504 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.983234 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.983991 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.992264 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsvsq\" (UniqueName: \"kubernetes.io/projected/5c1e2892-03e9-4f09-84ce-0c91842108cc-kube-api-access-fsvsq\") pod \"glance-default-internal-api-0\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:32 crc kubenswrapper[4863]: I1205 07:09:32.998449 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " pod="openstack/glance-default-internal-api-0" Dec 05 07:09:33 crc kubenswrapper[4863]: I1205 07:09:33.076534 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:33 crc kubenswrapper[4863]: I1205 07:09:33.554365 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:09:33 crc kubenswrapper[4863]: I1205 07:09:33.578413 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0752f5c6-12cf-4208-b523-f970b63f1b4b","Type":"ContainerStarted","Data":"72ec4d046f3f5ad9eff16adc698fc45b4acd74fdb27c848fc1acb11517c892e1"} Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.590366 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerStarted","Data":"0666ab8bbb54ba090a40ff59313b2bf8370c0a8ecdd2479dccffd29f6fbf09d3"} Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.591717 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-central-agent" containerID="cri-o://a8e096227430c723e90bd2e3c81b2443facfbeb97182d3357fd2c6b82315d6f1" gracePeriod=30 Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.593591 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.593662 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-notification-agent" containerID="cri-o://d9db50b5f43b888d7ef87ea8725b346f760b28e1a36a7d4accd3273cc163ad7e" gracePeriod=30 Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.593731 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="proxy-httpd" containerID="cri-o://0666ab8bbb54ba090a40ff59313b2bf8370c0a8ecdd2479dccffd29f6fbf09d3" gracePeriod=30 Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.594526 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="sg-core" containerID="cri-o://90c605622e6cd845e63df2b1863037928c241e467bf19819bf8d260792d50bc5" gracePeriod=30 Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.595943 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0752f5c6-12cf-4208-b523-f970b63f1b4b","Type":"ContainerStarted","Data":"a6a3f3263a8ecdd3d1951f70646009a4f53fd5264aa1aae420054b8eeff1e7cd"} Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.597321 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c1e2892-03e9-4f09-84ce-0c91842108cc","Type":"ContainerStarted","Data":"da5d5b4769a7f3a761cd294e9b7bfa4dad907aaa1a1e4ba0ae60a08d55e83ee5"} Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 07:09:34.615228 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="358077aa-1f8a-4496-8546-366bc758746d" path="/var/lib/kubelet/pods/358077aa-1f8a-4496-8546-366bc758746d/volumes" Dec 05 07:09:34 crc kubenswrapper[4863]: I1205 
07:09:34.621590 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.207642613 podStartE2EDuration="6.621569225s" podCreationTimestamp="2025-12-05 07:09:28 +0000 UTC" firstStartedPulling="2025-12-05 07:09:29.054371521 +0000 UTC m=+1396.780368561" lastFinishedPulling="2025-12-05 07:09:33.468298133 +0000 UTC m=+1401.194295173" observedRunningTime="2025-12-05 07:09:34.616955463 +0000 UTC m=+1402.342952523" watchObservedRunningTime="2025-12-05 07:09:34.621569225 +0000 UTC m=+1402.347566265" Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.120118 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.610220 4863 generic.go:334] "Generic (PLEG): container finished" podID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerID="0666ab8bbb54ba090a40ff59313b2bf8370c0a8ecdd2479dccffd29f6fbf09d3" exitCode=0 Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.610542 4863 generic.go:334] "Generic (PLEG): container finished" podID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerID="90c605622e6cd845e63df2b1863037928c241e467bf19819bf8d260792d50bc5" exitCode=2 Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.610556 4863 generic.go:334] "Generic (PLEG): container finished" podID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerID="d9db50b5f43b888d7ef87ea8725b346f760b28e1a36a7d4accd3273cc163ad7e" exitCode=0 Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.610424 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerDied","Data":"0666ab8bbb54ba090a40ff59313b2bf8370c0a8ecdd2479dccffd29f6fbf09d3"} Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.610632 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerDied","Data":"90c605622e6cd845e63df2b1863037928c241e467bf19819bf8d260792d50bc5"} Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.610652 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerDied","Data":"d9db50b5f43b888d7ef87ea8725b346f760b28e1a36a7d4accd3273cc163ad7e"} Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.615736 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0752f5c6-12cf-4208-b523-f970b63f1b4b","Type":"ContainerStarted","Data":"1fb7878f5faa41f5bfe6c62080b32d439375f5649baed85652cf33bd1cdfde23"} Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.618593 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c1e2892-03e9-4f09-84ce-0c91842108cc","Type":"ContainerStarted","Data":"0e90f617aa6517298bfd7f2ba20833ae5a734185ffcd027d050e5645935a88c4"} Dec 05 07:09:35 crc kubenswrapper[4863]: I1205 07:09:35.647433 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.6474147949999995 podStartE2EDuration="4.647414795s" podCreationTimestamp="2025-12-05 07:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:35.641909742 +0000 UTC m=+1403.367906782" watchObservedRunningTime="2025-12-05 
07:09:35.647414795 +0000 UTC m=+1403.373411835" Dec 05 07:09:36 crc kubenswrapper[4863]: I1205 07:09:36.643558 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c1e2892-03e9-4f09-84ce-0c91842108cc","Type":"ContainerStarted","Data":"45f9964a635141593c0ecd0b472b9d0197658218a0e29d30f979c3117ddf5090"} Dec 05 07:09:36 crc kubenswrapper[4863]: I1205 07:09:36.672506 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.672469806 podStartE2EDuration="4.672469806s" podCreationTimestamp="2025-12-05 07:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:09:36.662628048 +0000 UTC m=+1404.388625078" watchObservedRunningTime="2025-12-05 07:09:36.672469806 +0000 UTC m=+1404.398466846" Dec 05 07:09:37 crc kubenswrapper[4863]: I1205 07:09:37.657108 4863 generic.go:334] "Generic (PLEG): container finished" podID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerID="a8e096227430c723e90bd2e3c81b2443facfbeb97182d3357fd2c6b82315d6f1" exitCode=0 Dec 05 07:09:37 crc kubenswrapper[4863]: I1205 07:09:37.657280 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerDied","Data":"a8e096227430c723e90bd2e3c81b2443facfbeb97182d3357fd2c6b82315d6f1"} Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.545376 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-crth2"] Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.552745 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.558289 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crth2"] Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.587567 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-utilities\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.587798 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx7jd\" (UniqueName: \"kubernetes.io/projected/179a841f-e722-43ae-88d3-cb31fd9a25b5-kube-api-access-lx7jd\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.588039 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-catalog-content\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.689298 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-catalog-content\") pod 
\"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.689383 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-utilities\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.689495 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx7jd\" (UniqueName: \"kubernetes.io/projected/179a841f-e722-43ae-88d3-cb31fd9a25b5-kube-api-access-lx7jd\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.690277 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-catalog-content\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.690627 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-utilities\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.712329 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx7jd\" (UniqueName: \"kubernetes.io/projected/179a841f-e722-43ae-88d3-cb31fd9a25b5-kube-api-access-lx7jd\") pod \"certified-operators-crth2\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:39 crc kubenswrapper[4863]: I1205 07:09:39.879353 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:41 crc kubenswrapper[4863]: E1205 07:09:41.171232 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc154801_3bb7_4d1b_8165_10a6c5dcea55.slice/crio-c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27\": RecentStats: unable to find data in memory cache]" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.408411 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520054 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-sg-core-conf-yaml\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520570 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-combined-ca-bundle\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520633 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-log-httpd\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520664 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-config-data\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520678 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-ceilometer-tls-certs\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520699 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-run-httpd\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520867 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwpvz\" (UniqueName: \"kubernetes.io/projected/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-kube-api-access-pwpvz\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.520921 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-scripts\") pod \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\" (UID: \"6ecef997-f1fc-450e-b2ca-afc5281f3ff7\") " Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.521216 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.521252 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.521298 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.524181 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-kube-api-access-pwpvz" (OuterVolumeSpecName: "kube-api-access-pwpvz") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "kube-api-access-pwpvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.524323 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-scripts" (OuterVolumeSpecName: "scripts") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.545181 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.564521 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crth2"] Dec 05 07:09:41 crc kubenswrapper[4863]: W1205 07:09:41.567544 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod179a841f_e722_43ae_88d3_cb31fd9a25b5.slice/crio-b44baeb96d8f952bc584f20dee4ccc67931047ec2e9dad3471a8f23d3167deb3 WatchSource:0}: Error finding container b44baeb96d8f952bc584f20dee4ccc67931047ec2e9dad3471a8f23d3167deb3: Status 404 returned error can't find the container with id b44baeb96d8f952bc584f20dee4ccc67931047ec2e9dad3471a8f23d3167deb3 Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.579141 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.606516 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.612679 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-config-data" (OuterVolumeSpecName: "config-data") pod "6ecef997-f1fc-450e-b2ca-afc5281f3ff7" (UID: "6ecef997-f1fc-450e-b2ca-afc5281f3ff7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.623086 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.623124 4863 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.623141 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.623154 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwpvz\" (UniqueName: \"kubernetes.io/projected/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-kube-api-access-pwpvz\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.623165 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.623176 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.623187 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecef997-f1fc-450e-b2ca-afc5281f3ff7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.698978 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-txgnc" event={"ID":"b7e691e7-81f8-4d74-8d20-f679070e3321","Type":"ContainerStarted","Data":"cc1cf7408987229b4dde7b3d56a94b5b7862725d3aab7d49c4d3ef430a898d89"} Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.701819 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6ecef997-f1fc-450e-b2ca-afc5281f3ff7","Type":"ContainerDied","Data":"d21f2c4ae842b3ea3fae84594591774a7dbe0624c828cc5c6a6bb6f32c46bd50"} Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.701856 4863 scope.go:117] "RemoveContainer" 
containerID="0666ab8bbb54ba090a40ff59313b2bf8370c0a8ecdd2479dccffd29f6fbf09d3" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.701877 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.703513 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crth2" event={"ID":"179a841f-e722-43ae-88d3-cb31fd9a25b5","Type":"ContainerStarted","Data":"b44baeb96d8f952bc584f20dee4ccc67931047ec2e9dad3471a8f23d3167deb3"} Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.720908 4863 scope.go:117] "RemoveContainer" containerID="90c605622e6cd845e63df2b1863037928c241e467bf19819bf8d260792d50bc5" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.740059 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.743905 4863 scope.go:117] "RemoveContainer" containerID="d9db50b5f43b888d7ef87ea8725b346f760b28e1a36a7d4accd3273cc163ad7e" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.759142 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.768765 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:41 crc kubenswrapper[4863]: E1205 07:09:41.769146 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="proxy-httpd" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769162 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="proxy-httpd" Dec 05 07:09:41 crc kubenswrapper[4863]: E1205 07:09:41.769173 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-central-agent" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769179 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-central-agent" Dec 05 07:09:41 crc kubenswrapper[4863]: E1205 07:09:41.769191 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="sg-core" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769196 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="sg-core" Dec 05 07:09:41 crc kubenswrapper[4863]: E1205 07:09:41.769211 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-notification-agent" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769217 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-notification-agent" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769370 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-central-agent" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769390 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="ceilometer-notification-agent" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769398 4863 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="proxy-httpd" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.769410 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" containerName="sg-core" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.771776 4863 scope.go:117] "RemoveContainer" containerID="a8e096227430c723e90bd2e3c81b2443facfbeb97182d3357fd2c6b82315d6f1" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.772139 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.784441 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.790540 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.790754 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.790858 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.826419 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.826459 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.826515 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-config-data\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.826579 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-run-httpd\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.826612 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.826633 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8wnq\" (UniqueName: \"kubernetes.io/projected/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-kube-api-access-v8wnq\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc 
kubenswrapper[4863]: I1205 07:09:41.826656 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-scripts\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.826685 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-log-httpd\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928574 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928619 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8wnq\" (UniqueName: \"kubernetes.io/projected/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-kube-api-access-v8wnq\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928648 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-scripts\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928682 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-log-httpd\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928726 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928745 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928766 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-config-data\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.928822 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-run-httpd\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 
07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.929182 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-run-httpd\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.929524 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-log-httpd\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.933665 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-config-data\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.934299 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.935313 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.935826 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.946423 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-scripts\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:41 crc kubenswrapper[4863]: I1205 07:09:41.947965 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8wnq\" (UniqueName: \"kubernetes.io/projected/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-kube-api-access-v8wnq\") pod \"ceilometer-0\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " pod="openstack/ceilometer-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.100001 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.122533 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.122595 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.153949 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.178678 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.573435 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.617703 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ecef997-f1fc-450e-b2ca-afc5281f3ff7" path="/var/lib/kubelet/pods/6ecef997-f1fc-450e-b2ca-afc5281f3ff7/volumes" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.713428 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerStarted","Data":"346ca648c241b99730feb8a544be0193d6c93b07ad99956a90afa8a4d29de1d5"} Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.715751 4863 generic.go:334] "Generic (PLEG): container finished" podID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerID="0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd" exitCode=0 Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.715867 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crth2" event={"ID":"179a841f-e722-43ae-88d3-cb31fd9a25b5","Type":"ContainerDied","Data":"0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd"} Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.719002 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.719301 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 07:09:42 crc kubenswrapper[4863]: I1205 07:09:42.760262 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-txgnc" podStartSLOduration=3.144456536 podStartE2EDuration="13.760241348s" podCreationTimestamp="2025-12-05 07:09:29 +0000 UTC" firstStartedPulling="2025-12-05 07:09:30.580819352 +0000 UTC m=+1398.306816392" lastFinishedPulling="2025-12-05 07:09:41.196604164 +0000 UTC m=+1408.922601204" observedRunningTime="2025-12-05 07:09:42.753371161 +0000 UTC m=+1410.479368201" watchObservedRunningTime="2025-12-05 07:09:42.760241348 +0000 UTC m=+1410.486238388" Dec 05 07:09:43 crc kubenswrapper[4863]: I1205 07:09:43.077024 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:43 crc kubenswrapper[4863]: I1205 07:09:43.077392 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:43 crc kubenswrapper[4863]: I1205 07:09:43.104652 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:43 crc kubenswrapper[4863]: I1205 07:09:43.136032 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:43 crc kubenswrapper[4863]: I1205 07:09:43.730829 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerStarted","Data":"c6148bd6b7646b25530cfefce42e5c765d7120f8ca96b10d5d668de841a12acc"} Dec 05 07:09:43 crc kubenswrapper[4863]: I1205 07:09:43.732563 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:43 crc kubenswrapper[4863]: I1205 07:09:43.732597 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:44 crc kubenswrapper[4863]: I1205 07:09:44.738970 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 07:09:44 crc kubenswrapper[4863]: I1205 07:09:44.739396 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 07:09:45 crc kubenswrapper[4863]: I1205 07:09:45.094346 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 07:09:45 crc kubenswrapper[4863]: I1205 07:09:45.113910 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 07:09:45 crc kubenswrapper[4863]: I1205 07:09:45.616188 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:45 crc kubenswrapper[4863]: I1205 07:09:45.749913 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerStarted","Data":"a42dd81834f966ad7967099e6129f0d29cb1917041835f9f713903bf5a082300"} Dec 05 07:09:45 crc kubenswrapper[4863]: I1205 07:09:45.752306 4863 generic.go:334] "Generic (PLEG): container finished" podID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerID="5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3" exitCode=0 Dec 05 07:09:45 crc kubenswrapper[4863]: I1205 07:09:45.752361 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crth2" event={"ID":"179a841f-e722-43ae-88d3-cb31fd9a25b5","Type":"ContainerDied","Data":"5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3"} Dec 05 07:09:45 crc kubenswrapper[4863]: I1205 07:09:45.752403 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 07:09:46 crc kubenswrapper[4863]: I1205 07:09:46.532261 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 07:09:50 crc kubenswrapper[4863]: I1205 07:09:50.817811 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerStarted","Data":"cd2335513828cc73c3d619caadeca019dd4b4daa7a7e8eb0addb4d12a08f4352"} Dec 05 07:09:50 crc kubenswrapper[4863]: I1205 07:09:50.820210 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crth2" 
event={"ID":"179a841f-e722-43ae-88d3-cb31fd9a25b5","Type":"ContainerStarted","Data":"b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89"} Dec 05 07:09:50 crc kubenswrapper[4863]: I1205 07:09:50.837057 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-crth2" podStartSLOduration=4.662355006 podStartE2EDuration="11.837041328s" podCreationTimestamp="2025-12-05 07:09:39 +0000 UTC" firstStartedPulling="2025-12-05 07:09:42.717708076 +0000 UTC m=+1410.443705116" lastFinishedPulling="2025-12-05 07:09:49.892394398 +0000 UTC m=+1417.618391438" observedRunningTime="2025-12-05 07:09:50.836524596 +0000 UTC m=+1418.562521636" watchObservedRunningTime="2025-12-05 07:09:50.837041328 +0000 UTC m=+1418.563038368" Dec 05 07:09:51 crc kubenswrapper[4863]: E1205 07:09:51.434964 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc154801_3bb7_4d1b_8165_10a6c5dcea55.slice/crio-c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27\": RecentStats: unable to find data in memory cache]" Dec 05 07:09:51 crc kubenswrapper[4863]: I1205 07:09:51.831998 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerStarted","Data":"2fe6896aea760fd1939ffc571deab1022b933364cb8614fb5c5cc08aecb5d832"} Dec 05 07:09:51 crc kubenswrapper[4863]: I1205 07:09:51.832082 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 07:09:51 crc kubenswrapper[4863]: I1205 07:09:51.858641 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.97846471 podStartE2EDuration="10.858622185s" podCreationTimestamp="2025-12-05 07:09:41 +0000 UTC" firstStartedPulling="2025-12-05 07:09:42.578386307 +0000 UTC m=+1410.304383347" lastFinishedPulling="2025-12-05 07:09:51.458543782 +0000 UTC m=+1419.184540822" observedRunningTime="2025-12-05 07:09:51.850813626 +0000 UTC m=+1419.576810696" watchObservedRunningTime="2025-12-05 07:09:51.858622185 +0000 UTC m=+1419.584619215" Dec 05 07:09:57 crc kubenswrapper[4863]: I1205 07:09:57.882494 4863 generic.go:334] "Generic (PLEG): container finished" podID="b7e691e7-81f8-4d74-8d20-f679070e3321" containerID="cc1cf7408987229b4dde7b3d56a94b5b7862725d3aab7d49c4d3ef430a898d89" exitCode=0 Dec 05 07:09:57 crc kubenswrapper[4863]: I1205 07:09:57.882582 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-txgnc" event={"ID":"b7e691e7-81f8-4d74-8d20-f679070e3321","Type":"ContainerDied","Data":"cc1cf7408987229b4dde7b3d56a94b5b7862725d3aab7d49c4d3ef430a898d89"} Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.237646 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.353981 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-scripts\") pod \"b7e691e7-81f8-4d74-8d20-f679070e3321\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.354062 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-config-data\") pod \"b7e691e7-81f8-4d74-8d20-f679070e3321\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.354103 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zrgw\" (UniqueName: \"kubernetes.io/projected/b7e691e7-81f8-4d74-8d20-f679070e3321-kube-api-access-7zrgw\") pod \"b7e691e7-81f8-4d74-8d20-f679070e3321\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.354195 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-combined-ca-bundle\") pod \"b7e691e7-81f8-4d74-8d20-f679070e3321\" (UID: \"b7e691e7-81f8-4d74-8d20-f679070e3321\") " Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.359081 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7e691e7-81f8-4d74-8d20-f679070e3321-kube-api-access-7zrgw" (OuterVolumeSpecName: "kube-api-access-7zrgw") pod "b7e691e7-81f8-4d74-8d20-f679070e3321" (UID: "b7e691e7-81f8-4d74-8d20-f679070e3321"). InnerVolumeSpecName "kube-api-access-7zrgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.365594 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-scripts" (OuterVolumeSpecName: "scripts") pod "b7e691e7-81f8-4d74-8d20-f679070e3321" (UID: "b7e691e7-81f8-4d74-8d20-f679070e3321"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.380287 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-config-data" (OuterVolumeSpecName: "config-data") pod "b7e691e7-81f8-4d74-8d20-f679070e3321" (UID: "b7e691e7-81f8-4d74-8d20-f679070e3321"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.384450 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7e691e7-81f8-4d74-8d20-f679070e3321" (UID: "b7e691e7-81f8-4d74-8d20-f679070e3321"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.455798 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.455828 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.455839 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e691e7-81f8-4d74-8d20-f679070e3321-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.455851 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zrgw\" (UniqueName: \"kubernetes.io/projected/b7e691e7-81f8-4d74-8d20-f679070e3321-kube-api-access-7zrgw\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.879495 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.880340 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.900342 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-txgnc" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.900379 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-txgnc" event={"ID":"b7e691e7-81f8-4d74-8d20-f679070e3321","Type":"ContainerDied","Data":"0987954a0f79d90012d0901ff91f4b7bcf662e96abc681cebca09d8d29ca02af"} Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.900400 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0987954a0f79d90012d0901ff91f4b7bcf662e96abc681cebca09d8d29ca02af" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.937068 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:09:59 crc kubenswrapper[4863]: I1205 07:09:59.996365 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:00 crc kubenswrapper[4863]: E1205 07:10:00.001463 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7e691e7-81f8-4d74-8d20-f679070e3321" containerName="nova-cell0-conductor-db-sync" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.001517 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7e691e7-81f8-4d74-8d20-f679070e3321" containerName="nova-cell0-conductor-db-sync" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.001814 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7e691e7-81f8-4d74-8d20-f679070e3321" containerName="nova-cell0-conductor-db-sync" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.002613 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.006526 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.010191 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-96tlz" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.041631 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.168409 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.168496 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.168608 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2cmd\" (UniqueName: \"kubernetes.io/projected/15382ebb-9dca-4939-81d6-438388387256-kube-api-access-x2cmd\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.269791 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2cmd\" (UniqueName: \"kubernetes.io/projected/15382ebb-9dca-4939-81d6-438388387256-kube-api-access-x2cmd\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.269933 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.269975 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.274223 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.286120 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.287496 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2cmd\" (UniqueName: \"kubernetes.io/projected/15382ebb-9dca-4939-81d6-438388387256-kube-api-access-x2cmd\") pod \"nova-cell0-conductor-0\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.347489 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.413086 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.887122 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.912678 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"15382ebb-9dca-4939-81d6-438388387256","Type":"ContainerStarted","Data":"087da04f2ae1ec5f79386a062d06ace18f1ec089a13adc9c019b733139bf9e7d"} Dec 05 07:10:00 crc kubenswrapper[4863]: I1205 07:10:00.964936 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:10:01 crc kubenswrapper[4863]: I1205 07:10:01.018586 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crth2"] Dec 05 07:10:01 crc kubenswrapper[4863]: E1205 07:10:01.681438 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc154801_3bb7_4d1b_8165_10a6c5dcea55.slice/crio-c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27\": RecentStats: unable to find data in memory cache]" Dec 05 07:10:01 crc kubenswrapper[4863]: I1205 07:10:01.922453 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" containerID="cri-o://6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" gracePeriod=30 Dec 05 07:10:01 crc kubenswrapper[4863]: I1205 07:10:01.922806 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"15382ebb-9dca-4939-81d6-438388387256","Type":"ContainerStarted","Data":"6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6"} Dec 05 07:10:01 crc kubenswrapper[4863]: I1205 07:10:01.922841 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:01 crc kubenswrapper[4863]: I1205 07:10:01.954534 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.954517326 podStartE2EDuration="2.954517326s" podCreationTimestamp="2025-12-05 07:09:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:10:01.940093336 +0000 UTC m=+1429.666090376" watchObservedRunningTime="2025-12-05 07:10:01.954517326 +0000 UTC m=+1429.680514366" Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.037430 4863 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.041791 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-central-agent" containerID="cri-o://c6148bd6b7646b25530cfefce42e5c765d7120f8ca96b10d5d668de841a12acc" gracePeriod=30 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.041931 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="proxy-httpd" containerID="cri-o://2fe6896aea760fd1939ffc571deab1022b933364cb8614fb5c5cc08aecb5d832" gracePeriod=30 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.041971 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="sg-core" containerID="cri-o://cd2335513828cc73c3d619caadeca019dd4b4daa7a7e8eb0addb4d12a08f4352" gracePeriod=30 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.042002 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-notification-agent" containerID="cri-o://a42dd81834f966ad7967099e6129f0d29cb1917041835f9f713903bf5a082300" gracePeriod=30 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.054552 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.179:3000/\": EOF" Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.938137 4863 generic.go:334] "Generic (PLEG): container finished" podID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerID="2fe6896aea760fd1939ffc571deab1022b933364cb8614fb5c5cc08aecb5d832" exitCode=0 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.938221 4863 generic.go:334] "Generic (PLEG): container finished" podID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerID="cd2335513828cc73c3d619caadeca019dd4b4daa7a7e8eb0addb4d12a08f4352" exitCode=2 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.938235 4863 generic.go:334] "Generic (PLEG): container finished" podID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerID="c6148bd6b7646b25530cfefce42e5c765d7120f8ca96b10d5d668de841a12acc" exitCode=0 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.938644 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-crth2" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="registry-server" containerID="cri-o://b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89" gracePeriod=2 Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.939955 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerDied","Data":"2fe6896aea760fd1939ffc571deab1022b933364cb8614fb5c5cc08aecb5d832"} Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.940113 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerDied","Data":"cd2335513828cc73c3d619caadeca019dd4b4daa7a7e8eb0addb4d12a08f4352"} Dec 05 07:10:02 crc kubenswrapper[4863]: I1205 07:10:02.940140 4863 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerDied","Data":"c6148bd6b7646b25530cfefce42e5c765d7120f8ca96b10d5d668de841a12acc"} Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.141525 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p4r24"] Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.146921 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.165992 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p4r24"] Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.227466 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhbl5\" (UniqueName: \"kubernetes.io/projected/30999807-5aa7-49d4-af4c-fe251973e66f-kube-api-access-zhbl5\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.227693 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-utilities\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.228127 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-catalog-content\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.329426 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-catalog-content\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.329556 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhbl5\" (UniqueName: \"kubernetes.io/projected/30999807-5aa7-49d4-af4c-fe251973e66f-kube-api-access-zhbl5\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.329594 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-utilities\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.330183 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-utilities\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " 
pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.330496 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-catalog-content\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.352033 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhbl5\" (UniqueName: \"kubernetes.io/projected/30999807-5aa7-49d4-af4c-fe251973e66f-kube-api-access-zhbl5\") pod \"redhat-marketplace-p4r24\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.468329 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.490491 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.634691 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-utilities\") pod \"179a841f-e722-43ae-88d3-cb31fd9a25b5\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.634812 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-catalog-content\") pod \"179a841f-e722-43ae-88d3-cb31fd9a25b5\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.634858 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx7jd\" (UniqueName: \"kubernetes.io/projected/179a841f-e722-43ae-88d3-cb31fd9a25b5-kube-api-access-lx7jd\") pod \"179a841f-e722-43ae-88d3-cb31fd9a25b5\" (UID: \"179a841f-e722-43ae-88d3-cb31fd9a25b5\") " Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.635583 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-utilities" (OuterVolumeSpecName: "utilities") pod "179a841f-e722-43ae-88d3-cb31fd9a25b5" (UID: "179a841f-e722-43ae-88d3-cb31fd9a25b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.640240 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/179a841f-e722-43ae-88d3-cb31fd9a25b5-kube-api-access-lx7jd" (OuterVolumeSpecName: "kube-api-access-lx7jd") pod "179a841f-e722-43ae-88d3-cb31fd9a25b5" (UID: "179a841f-e722-43ae-88d3-cb31fd9a25b5"). InnerVolumeSpecName "kube-api-access-lx7jd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.686758 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "179a841f-e722-43ae-88d3-cb31fd9a25b5" (UID: "179a841f-e722-43ae-88d3-cb31fd9a25b5"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.737111 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.737151 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx7jd\" (UniqueName: \"kubernetes.io/projected/179a841f-e722-43ae-88d3-cb31fd9a25b5-kube-api-access-lx7jd\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.737167 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/179a841f-e722-43ae-88d3-cb31fd9a25b5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.948986 4863 generic.go:334] "Generic (PLEG): container finished" podID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerID="b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89" exitCode=0 Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.949069 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crth2" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.949065 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crth2" event={"ID":"179a841f-e722-43ae-88d3-cb31fd9a25b5","Type":"ContainerDied","Data":"b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89"} Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.949520 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crth2" event={"ID":"179a841f-e722-43ae-88d3-cb31fd9a25b5","Type":"ContainerDied","Data":"b44baeb96d8f952bc584f20dee4ccc67931047ec2e9dad3471a8f23d3167deb3"} Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.949545 4863 scope.go:117] "RemoveContainer" containerID="b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.972658 4863 scope.go:117] "RemoveContainer" containerID="5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3" Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.982177 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crth2"] Dec 05 07:10:03 crc kubenswrapper[4863]: I1205 07:10:03.990591 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-crth2"] Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.000634 4863 scope.go:117] "RemoveContainer" containerID="0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.036811 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p4r24"] Dec 05 07:10:04 crc kubenswrapper[4863]: W1205 07:10:04.048065 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30999807_5aa7_49d4_af4c_fe251973e66f.slice/crio-6338802c1977e3d17cd0e817d72bc22bc7a4b2c7bf4125f2eab1ddc7aa47cf64 WatchSource:0}: Error finding container 6338802c1977e3d17cd0e817d72bc22bc7a4b2c7bf4125f2eab1ddc7aa47cf64: Status 404 returned error can't find the container with id 
6338802c1977e3d17cd0e817d72bc22bc7a4b2c7bf4125f2eab1ddc7aa47cf64 Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.054896 4863 scope.go:117] "RemoveContainer" containerID="b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89" Dec 05 07:10:04 crc kubenswrapper[4863]: E1205 07:10:04.055192 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89\": container with ID starting with b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89 not found: ID does not exist" containerID="b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.055226 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89"} err="failed to get container status \"b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89\": rpc error: code = NotFound desc = could not find container \"b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89\": container with ID starting with b4c4da9e4d0ddd60aba9959e1e7994b96a36cb968341147719b5f27c1ebd4b89 not found: ID does not exist" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.055254 4863 scope.go:117] "RemoveContainer" containerID="5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3" Dec 05 07:10:04 crc kubenswrapper[4863]: E1205 07:10:04.055638 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3\": container with ID starting with 5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3 not found: ID does not exist" containerID="5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.055659 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3"} err="failed to get container status \"5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3\": rpc error: code = NotFound desc = could not find container \"5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3\": container with ID starting with 5fe3ecfaf148595a408d0280cf70ed23e6dd35d901d3fa3d6274eb335c80b0d3 not found: ID does not exist" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.055672 4863 scope.go:117] "RemoveContainer" containerID="0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd" Dec 05 07:10:04 crc kubenswrapper[4863]: E1205 07:10:04.058345 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd\": container with ID starting with 0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd not found: ID does not exist" containerID="0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.058399 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd"} err="failed to get container status \"0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd\": rpc error: code = NotFound desc = 
could not find container \"0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd\": container with ID starting with 0215256d40ba8b1f3d7eb8943deb6b8f4b081dfb139123543f4764b15fcfb5cd not found: ID does not exist" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.613254 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" path="/var/lib/kubelet/pods/179a841f-e722-43ae-88d3-cb31fd9a25b5/volumes" Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.960555 4863 generic.go:334] "Generic (PLEG): container finished" podID="30999807-5aa7-49d4-af4c-fe251973e66f" containerID="a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb" exitCode=0 Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.960592 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p4r24" event={"ID":"30999807-5aa7-49d4-af4c-fe251973e66f","Type":"ContainerDied","Data":"a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb"} Dec 05 07:10:04 crc kubenswrapper[4863]: I1205 07:10:04.960617 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p4r24" event={"ID":"30999807-5aa7-49d4-af4c-fe251973e66f","Type":"ContainerStarted","Data":"6338802c1977e3d17cd0e817d72bc22bc7a4b2c7bf4125f2eab1ddc7aa47cf64"} Dec 05 07:10:06 crc kubenswrapper[4863]: I1205 07:10:06.993407 4863 generic.go:334] "Generic (PLEG): container finished" podID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerID="a42dd81834f966ad7967099e6129f0d29cb1917041835f9f713903bf5a082300" exitCode=0 Dec 05 07:10:06 crc kubenswrapper[4863]: I1205 07:10:06.993499 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerDied","Data":"a42dd81834f966ad7967099e6129f0d29cb1917041835f9f713903bf5a082300"} Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.278366 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402241 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-log-httpd\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402296 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-ceilometer-tls-certs\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402345 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-combined-ca-bundle\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402391 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-run-httpd\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402414 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8wnq\" (UniqueName: \"kubernetes.io/projected/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-kube-api-access-v8wnq\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402437 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-config-data\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402465 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-sg-core-conf-yaml\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.402602 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-scripts\") pod \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\" (UID: \"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360\") " Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.403153 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.403334 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.404447 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.408043 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-scripts" (OuterVolumeSpecName: "scripts") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.412710 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-kube-api-access-v8wnq" (OuterVolumeSpecName: "kube-api-access-v8wnq") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "kube-api-access-v8wnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.435921 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.457072 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.485658 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.504896 4863 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.505108 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.505226 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.505283 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8wnq\" (UniqueName: \"kubernetes.io/projected/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-kube-api-access-v8wnq\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.505336 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.505384 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.518628 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-config-data" (OuterVolumeSpecName: "config-data") pod "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" (UID: "a8e8dcb9-7cbb-4334-a0d8-6d1455b41360"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:07 crc kubenswrapper[4863]: I1205 07:10:07.607600 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.008746 4863 generic.go:334] "Generic (PLEG): container finished" podID="30999807-5aa7-49d4-af4c-fe251973e66f" containerID="5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5" exitCode=0 Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.008833 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p4r24" event={"ID":"30999807-5aa7-49d4-af4c-fe251973e66f","Type":"ContainerDied","Data":"5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5"} Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.012977 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8e8dcb9-7cbb-4334-a0d8-6d1455b41360","Type":"ContainerDied","Data":"346ca648c241b99730feb8a544be0193d6c93b07ad99956a90afa8a4d29de1d5"} Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.013051 4863 scope.go:117] "RemoveContainer" containerID="2fe6896aea760fd1939ffc571deab1022b933364cb8614fb5c5cc08aecb5d832" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.013166 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.065735 4863 scope.go:117] "RemoveContainer" containerID="cd2335513828cc73c3d619caadeca019dd4b4daa7a7e8eb0addb4d12a08f4352" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.073593 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.087575 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.093896 4863 scope.go:117] "RemoveContainer" containerID="a42dd81834f966ad7967099e6129f0d29cb1917041835f9f713903bf5a082300" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096272 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:10:08 crc kubenswrapper[4863]: E1205 07:10:08.096705 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="extract-utilities" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096725 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="extract-utilities" Dec 05 07:10:08 crc kubenswrapper[4863]: E1205 07:10:08.096745 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="sg-core" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096769 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="sg-core" Dec 05 07:10:08 crc kubenswrapper[4863]: E1205 07:10:08.096794 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="registry-server" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096803 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="registry-server" Dec 05 07:10:08 crc kubenswrapper[4863]: E1205 07:10:08.096818 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="proxy-httpd" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096827 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="proxy-httpd" Dec 05 07:10:08 crc kubenswrapper[4863]: E1205 07:10:08.096843 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="extract-content" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096851 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="extract-content" Dec 05 07:10:08 crc kubenswrapper[4863]: E1205 07:10:08.096862 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-notification-agent" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096870 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-notification-agent" Dec 05 07:10:08 crc kubenswrapper[4863]: E1205 07:10:08.096886 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-central-agent" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.096894 4863 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-central-agent" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.097097 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="proxy-httpd" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.097120 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="179a841f-e722-43ae-88d3-cb31fd9a25b5" containerName="registry-server" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.097138 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="sg-core" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.097151 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-notification-agent" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.097167 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" containerName="ceilometer-central-agent" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.099237 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.103602 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.103788 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.103836 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.114497 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.135695 4863 scope.go:117] "RemoveContainer" containerID="c6148bd6b7646b25530cfefce42e5c765d7120f8ca96b10d5d668de841a12acc" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.217411 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-log-httpd\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.217570 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-config-data\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.217657 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-scripts\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.217681 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.217776 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.217889 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.218051 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-run-httpd\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.218222 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5zz4\" (UniqueName: \"kubernetes.io/projected/24322583-3a12-45d0-8c7d-ae8b6bfca154-kube-api-access-h5zz4\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320171 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320285 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320350 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-run-httpd\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320431 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5zz4\" (UniqueName: \"kubernetes.io/projected/24322583-3a12-45d0-8c7d-ae8b6bfca154-kube-api-access-h5zz4\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320511 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-log-httpd\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320596 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-config-data\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320636 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-scripts\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320665 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.320865 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-run-httpd\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.321286 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-log-httpd\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.326885 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.333230 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.333908 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-config-data\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.337402 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5zz4\" (UniqueName: \"kubernetes.io/projected/24322583-3a12-45d0-8c7d-ae8b6bfca154-kube-api-access-h5zz4\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.342696 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-scripts\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.348445 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.422138 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.622599 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8e8dcb9-7cbb-4334-a0d8-6d1455b41360" path="/var/lib/kubelet/pods/a8e8dcb9-7cbb-4334-a0d8-6d1455b41360/volumes" Dec 05 07:10:08 crc kubenswrapper[4863]: W1205 07:10:08.918236 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24322583_3a12_45d0_8c7d_ae8b6bfca154.slice/crio-48012543c989b81aae96ba0a4b012fd6a63722d9fb26b7e635fc468189cc7650 WatchSource:0}: Error finding container 48012543c989b81aae96ba0a4b012fd6a63722d9fb26b7e635fc468189cc7650: Status 404 returned error can't find the container with id 48012543c989b81aae96ba0a4b012fd6a63722d9fb26b7e635fc468189cc7650 Dec 05 07:10:08 crc kubenswrapper[4863]: I1205 07:10:08.924189 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:10:09 crc kubenswrapper[4863]: I1205 07:10:09.024227 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p4r24" event={"ID":"30999807-5aa7-49d4-af4c-fe251973e66f","Type":"ContainerStarted","Data":"7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3"} Dec 05 07:10:09 crc kubenswrapper[4863]: I1205 07:10:09.026645 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerStarted","Data":"48012543c989b81aae96ba0a4b012fd6a63722d9fb26b7e635fc468189cc7650"} Dec 05 07:10:09 crc kubenswrapper[4863]: I1205 07:10:09.046219 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p4r24" podStartSLOduration=2.533474959 podStartE2EDuration="6.046199934s" podCreationTimestamp="2025-12-05 07:10:03 +0000 UTC" firstStartedPulling="2025-12-05 07:10:04.962769537 +0000 UTC m=+1432.688766577" lastFinishedPulling="2025-12-05 07:10:08.475494512 +0000 UTC m=+1436.201491552" observedRunningTime="2025-12-05 07:10:09.041954021 +0000 UTC m=+1436.767951081" watchObservedRunningTime="2025-12-05 07:10:09.046199934 +0000 UTC m=+1436.772196974" Dec 05 07:10:10 crc kubenswrapper[4863]: E1205 07:10:10.351430 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:10 crc kubenswrapper[4863]: E1205 07:10:10.354781 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:10 crc kubenswrapper[4863]: E1205 07:10:10.357245 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is 
stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:10 crc kubenswrapper[4863]: E1205 07:10:10.357446 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:11 crc kubenswrapper[4863]: I1205 07:10:11.046655 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerStarted","Data":"2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd"} Dec 05 07:10:11 crc kubenswrapper[4863]: E1205 07:10:11.925183 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc154801_3bb7_4d1b_8165_10a6c5dcea55.slice/crio-c720ebcc707be99cedf9c5045763f92b87ec5598074ebd577c4bc67244c9fd27\": RecentStats: unable to find data in memory cache]" Dec 05 07:10:12 crc kubenswrapper[4863]: I1205 07:10:12.056420 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerStarted","Data":"3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f"} Dec 05 07:10:12 crc kubenswrapper[4863]: I1205 07:10:12.056853 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerStarted","Data":"87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f"} Dec 05 07:10:13 crc kubenswrapper[4863]: I1205 07:10:13.468893 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:13 crc kubenswrapper[4863]: I1205 07:10:13.469246 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:13 crc kubenswrapper[4863]: I1205 07:10:13.516699 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:14 crc kubenswrapper[4863]: I1205 07:10:14.086237 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerStarted","Data":"d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff"} Dec 05 07:10:14 crc kubenswrapper[4863]: I1205 07:10:14.086638 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 07:10:14 crc kubenswrapper[4863]: I1205 07:10:14.120081 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.89253524 podStartE2EDuration="6.120061302s" podCreationTimestamp="2025-12-05 07:10:08 +0000 UTC" firstStartedPulling="2025-12-05 07:10:08.920752051 +0000 UTC m=+1436.646749101" lastFinishedPulling="2025-12-05 07:10:13.148278123 +0000 UTC m=+1440.874275163" observedRunningTime="2025-12-05 07:10:14.107765845 +0000 UTC m=+1441.833762895" watchObservedRunningTime="2025-12-05 07:10:14.120061302 +0000 UTC m=+1441.846058352" Dec 05 07:10:14 crc kubenswrapper[4863]: 
I1205 07:10:14.134480 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:14 crc kubenswrapper[4863]: I1205 07:10:14.181802 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p4r24"] Dec 05 07:10:15 crc kubenswrapper[4863]: E1205 07:10:15.350004 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:15 crc kubenswrapper[4863]: E1205 07:10:15.351415 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:15 crc kubenswrapper[4863]: E1205 07:10:15.354392 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:15 crc kubenswrapper[4863]: E1205 07:10:15.354443 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:16 crc kubenswrapper[4863]: I1205 07:10:16.102589 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p4r24" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="registry-server" containerID="cri-o://7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3" gracePeriod=2 Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.074902 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.123382 4863 generic.go:334] "Generic (PLEG): container finished" podID="30999807-5aa7-49d4-af4c-fe251973e66f" containerID="7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3" exitCode=0 Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.123437 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p4r24" event={"ID":"30999807-5aa7-49d4-af4c-fe251973e66f","Type":"ContainerDied","Data":"7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3"} Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.123489 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p4r24" event={"ID":"30999807-5aa7-49d4-af4c-fe251973e66f","Type":"ContainerDied","Data":"6338802c1977e3d17cd0e817d72bc22bc7a4b2c7bf4125f2eab1ddc7aa47cf64"} Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.123522 4863 scope.go:117] "RemoveContainer" containerID="7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.123740 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p4r24" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.161464 4863 scope.go:117] "RemoveContainer" containerID="5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.180795 4863 scope.go:117] "RemoveContainer" containerID="a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.191462 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-utilities\") pod \"30999807-5aa7-49d4-af4c-fe251973e66f\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.191579 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhbl5\" (UniqueName: \"kubernetes.io/projected/30999807-5aa7-49d4-af4c-fe251973e66f-kube-api-access-zhbl5\") pod \"30999807-5aa7-49d4-af4c-fe251973e66f\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.191667 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-catalog-content\") pod \"30999807-5aa7-49d4-af4c-fe251973e66f\" (UID: \"30999807-5aa7-49d4-af4c-fe251973e66f\") " Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.192431 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-utilities" (OuterVolumeSpecName: "utilities") pod "30999807-5aa7-49d4-af4c-fe251973e66f" (UID: "30999807-5aa7-49d4-af4c-fe251973e66f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.198647 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30999807-5aa7-49d4-af4c-fe251973e66f-kube-api-access-zhbl5" (OuterVolumeSpecName: "kube-api-access-zhbl5") pod "30999807-5aa7-49d4-af4c-fe251973e66f" (UID: "30999807-5aa7-49d4-af4c-fe251973e66f"). InnerVolumeSpecName "kube-api-access-zhbl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.218592 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30999807-5aa7-49d4-af4c-fe251973e66f" (UID: "30999807-5aa7-49d4-af4c-fe251973e66f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.268318 4863 scope.go:117] "RemoveContainer" containerID="7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3" Dec 05 07:10:17 crc kubenswrapper[4863]: E1205 07:10:17.268733 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3\": container with ID starting with 7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3 not found: ID does not exist" containerID="7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.268774 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3"} err="failed to get container status \"7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3\": rpc error: code = NotFound desc = could not find container \"7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3\": container with ID starting with 7e511ae002dd3e61ac4928c89c054e0c67677f540e5eba3055b875261a283df3 not found: ID does not exist" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.268802 4863 scope.go:117] "RemoveContainer" containerID="5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5" Dec 05 07:10:17 crc kubenswrapper[4863]: E1205 07:10:17.269324 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5\": container with ID starting with 5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5 not found: ID does not exist" containerID="5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.269370 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5"} err="failed to get container status \"5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5\": rpc error: code = NotFound desc = could not find container \"5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5\": container with ID starting with 5e260e4569cd0bc62c9fe4e1d31591e914a4789e2073be80182f3c8a8f7a95b5 not found: ID does not exist" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.269397 4863 scope.go:117] "RemoveContainer" 
containerID="a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb" Dec 05 07:10:17 crc kubenswrapper[4863]: E1205 07:10:17.269712 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb\": container with ID starting with a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb not found: ID does not exist" containerID="a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.269738 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb"} err="failed to get container status \"a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb\": rpc error: code = NotFound desc = could not find container \"a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb\": container with ID starting with a87e1afb913f8042a158bb5424e0f6e97a74999ec851054fb204276ec3279efb not found: ID does not exist" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.294318 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhbl5\" (UniqueName: \"kubernetes.io/projected/30999807-5aa7-49d4-af4c-fe251973e66f-kube-api-access-zhbl5\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.294390 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.294419 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30999807-5aa7-49d4-af4c-fe251973e66f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.474721 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p4r24"] Dec 05 07:10:17 crc kubenswrapper[4863]: I1205 07:10:17.485831 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p4r24"] Dec 05 07:10:18 crc kubenswrapper[4863]: I1205 07:10:18.612619 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" path="/var/lib/kubelet/pods/30999807-5aa7-49d4-af4c-fe251973e66f/volumes" Dec 05 07:10:20 crc kubenswrapper[4863]: E1205 07:10:20.350416 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:20 crc kubenswrapper[4863]: E1205 07:10:20.352280 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:20 crc kubenswrapper[4863]: E1205 07:10:20.353876 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, 
stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:20 crc kubenswrapper[4863]: E1205 07:10:20.353922 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:25 crc kubenswrapper[4863]: E1205 07:10:25.350467 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:25 crc kubenswrapper[4863]: E1205 07:10:25.352598 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:25 crc kubenswrapper[4863]: E1205 07:10:25.354062 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:25 crc kubenswrapper[4863]: E1205 07:10:25.354099 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.140356 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zmhp9"] Dec 05 07:10:26 crc kubenswrapper[4863]: E1205 07:10:26.140740 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="registry-server" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.140757 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="registry-server" Dec 05 07:10:26 crc kubenswrapper[4863]: E1205 07:10:26.140790 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="extract-utilities" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.140798 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="extract-utilities" Dec 05 07:10:26 crc kubenswrapper[4863]: E1205 07:10:26.140814 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="extract-content" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.140821 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="extract-content" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.140997 4863 
memory_manager.go:354] "RemoveStaleState removing state" podUID="30999807-5aa7-49d4-af4c-fe251973e66f" containerName="registry-server" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.142226 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.180725 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zmhp9"] Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.265043 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75vj7\" (UniqueName: \"kubernetes.io/projected/b6295f62-24bc-4974-9a41-191497bfdd94-kube-api-access-75vj7\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.265248 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-utilities\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.266248 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-catalog-content\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.367792 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-catalog-content\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.367922 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75vj7\" (UniqueName: \"kubernetes.io/projected/b6295f62-24bc-4974-9a41-191497bfdd94-kube-api-access-75vj7\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.367973 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-utilities\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.368266 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-catalog-content\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.368435 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-utilities\") pod \"redhat-operators-zmhp9\" 
(UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.393125 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75vj7\" (UniqueName: \"kubernetes.io/projected/b6295f62-24bc-4974-9a41-191497bfdd94-kube-api-access-75vj7\") pod \"redhat-operators-zmhp9\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.478462 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:26 crc kubenswrapper[4863]: I1205 07:10:26.952490 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zmhp9"] Dec 05 07:10:27 crc kubenswrapper[4863]: I1205 07:10:27.249781 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerStarted","Data":"9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded"} Dec 05 07:10:27 crc kubenswrapper[4863]: I1205 07:10:27.249829 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerStarted","Data":"b46b6f7126bd5edf661d6caea8d529a3fca7e4cbffae94f44243942ee54cce1a"} Dec 05 07:10:28 crc kubenswrapper[4863]: I1205 07:10:28.262227 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6295f62-24bc-4974-9a41-191497bfdd94" containerID="9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded" exitCode=0 Dec 05 07:10:28 crc kubenswrapper[4863]: I1205 07:10:28.262343 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerDied","Data":"9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded"} Dec 05 07:10:29 crc kubenswrapper[4863]: I1205 07:10:29.274951 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerStarted","Data":"db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4"} Dec 05 07:10:30 crc kubenswrapper[4863]: I1205 07:10:30.291162 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6295f62-24bc-4974-9a41-191497bfdd94" containerID="db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4" exitCode=0 Dec 05 07:10:30 crc kubenswrapper[4863]: I1205 07:10:30.291386 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerDied","Data":"db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4"} Dec 05 07:10:30 crc kubenswrapper[4863]: E1205 07:10:30.351373 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:30 crc kubenswrapper[4863]: E1205 07:10:30.353027 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an 
exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:30 crc kubenswrapper[4863]: E1205 07:10:30.355330 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:10:30 crc kubenswrapper[4863]: E1205 07:10:30.355374 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:31 crc kubenswrapper[4863]: I1205 07:10:31.302910 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerStarted","Data":"176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d"} Dec 05 07:10:31 crc kubenswrapper[4863]: I1205 07:10:31.325256 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zmhp9" podStartSLOduration=2.859201521 podStartE2EDuration="5.325237821s" podCreationTimestamp="2025-12-05 07:10:26 +0000 UTC" firstStartedPulling="2025-12-05 07:10:28.266985767 +0000 UTC m=+1455.992982817" lastFinishedPulling="2025-12-05 07:10:30.733022077 +0000 UTC m=+1458.459019117" observedRunningTime="2025-12-05 07:10:31.319676356 +0000 UTC m=+1459.045673416" watchObservedRunningTime="2025-12-05 07:10:31.325237821 +0000 UTC m=+1459.051234861" Dec 05 07:10:32 crc kubenswrapper[4863]: I1205 07:10:32.314198 4863 generic.go:334] "Generic (PLEG): container finished" podID="15382ebb-9dca-4939-81d6-438388387256" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" exitCode=137 Dec 05 07:10:32 crc kubenswrapper[4863]: I1205 07:10:32.314256 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"15382ebb-9dca-4939-81d6-438388387256","Type":"ContainerDied","Data":"6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6"} Dec 05 07:10:32 crc kubenswrapper[4863]: I1205 07:10:32.905410 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:32 crc kubenswrapper[4863]: I1205 07:10:32.994803 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-combined-ca-bundle\") pod \"15382ebb-9dca-4939-81d6-438388387256\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " Dec 05 07:10:32 crc kubenswrapper[4863]: I1205 07:10:32.995032 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-config-data\") pod \"15382ebb-9dca-4939-81d6-438388387256\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " Dec 05 07:10:32 crc kubenswrapper[4863]: I1205 07:10:32.995125 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2cmd\" (UniqueName: \"kubernetes.io/projected/15382ebb-9dca-4939-81d6-438388387256-kube-api-access-x2cmd\") pod \"15382ebb-9dca-4939-81d6-438388387256\" (UID: \"15382ebb-9dca-4939-81d6-438388387256\") " Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.001221 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15382ebb-9dca-4939-81d6-438388387256-kube-api-access-x2cmd" (OuterVolumeSpecName: "kube-api-access-x2cmd") pod "15382ebb-9dca-4939-81d6-438388387256" (UID: "15382ebb-9dca-4939-81d6-438388387256"). InnerVolumeSpecName "kube-api-access-x2cmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.021762 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-config-data" (OuterVolumeSpecName: "config-data") pod "15382ebb-9dca-4939-81d6-438388387256" (UID: "15382ebb-9dca-4939-81d6-438388387256"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.022052 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15382ebb-9dca-4939-81d6-438388387256" (UID: "15382ebb-9dca-4939-81d6-438388387256"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.097101 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.097145 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15382ebb-9dca-4939-81d6-438388387256-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.097155 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2cmd\" (UniqueName: \"kubernetes.io/projected/15382ebb-9dca-4939-81d6-438388387256-kube-api-access-x2cmd\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.324205 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"15382ebb-9dca-4939-81d6-438388387256","Type":"ContainerDied","Data":"087da04f2ae1ec5f79386a062d06ace18f1ec089a13adc9c019b733139bf9e7d"} Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.324256 4863 scope.go:117] "RemoveContainer" containerID="6a27c3ceb917035733ec7c54becec1c39a3161d2dfb9d8897eaf72f0bbef0cc6" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.324269 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.372961 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.387008 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.397895 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:33 crc kubenswrapper[4863]: E1205 07:10:33.398254 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.398269 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.398498 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="15382ebb-9dca-4939-81d6-438388387256" containerName="nova-cell0-conductor-conductor" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.399053 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.406208 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-96tlz" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.406391 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.408923 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.507359 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jvmv\" (UniqueName: \"kubernetes.io/projected/ef02ff71-0212-4b81-8243-18e2d28b828e-kube-api-access-4jvmv\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.507543 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.507577 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.608953 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jvmv\" (UniqueName: \"kubernetes.io/projected/ef02ff71-0212-4b81-8243-18e2d28b828e-kube-api-access-4jvmv\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.609422 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.609465 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.613431 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.619224 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.631197 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jvmv\" (UniqueName: \"kubernetes.io/projected/ef02ff71-0212-4b81-8243-18e2d28b828e-kube-api-access-4jvmv\") pod \"nova-cell0-conductor-0\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:33 crc kubenswrapper[4863]: I1205 07:10:33.717279 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:34 crc kubenswrapper[4863]: W1205 07:10:34.230943 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef02ff71_0212_4b81_8243_18e2d28b828e.slice/crio-6e869b05d2334aad77eefe80f30464f62def6965b25799672450931bd1399f54 WatchSource:0}: Error finding container 6e869b05d2334aad77eefe80f30464f62def6965b25799672450931bd1399f54: Status 404 returned error can't find the container with id 6e869b05d2334aad77eefe80f30464f62def6965b25799672450931bd1399f54 Dec 05 07:10:34 crc kubenswrapper[4863]: I1205 07:10:34.233128 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:10:34 crc kubenswrapper[4863]: I1205 07:10:34.336295 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ef02ff71-0212-4b81-8243-18e2d28b828e","Type":"ContainerStarted","Data":"6e869b05d2334aad77eefe80f30464f62def6965b25799672450931bd1399f54"} Dec 05 07:10:34 crc kubenswrapper[4863]: I1205 07:10:34.655310 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15382ebb-9dca-4939-81d6-438388387256" path="/var/lib/kubelet/pods/15382ebb-9dca-4939-81d6-438388387256/volumes" Dec 05 07:10:36 crc kubenswrapper[4863]: I1205 07:10:36.354257 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ef02ff71-0212-4b81-8243-18e2d28b828e","Type":"ContainerStarted","Data":"757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6"} Dec 05 07:10:36 crc kubenswrapper[4863]: I1205 07:10:36.354857 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:36 crc kubenswrapper[4863]: I1205 07:10:36.381194 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=3.381175265 podStartE2EDuration="3.381175265s" podCreationTimestamp="2025-12-05 07:10:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:10:36.372531765 +0000 UTC m=+1464.098528825" watchObservedRunningTime="2025-12-05 07:10:36.381175265 +0000 UTC m=+1464.107172315" Dec 05 07:10:36 crc kubenswrapper[4863]: I1205 07:10:36.479916 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:36 crc kubenswrapper[4863]: I1205 07:10:36.479976 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:37 crc kubenswrapper[4863]: I1205 07:10:37.540930 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zmhp9" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" 
containerName="registry-server" probeResult="failure" output=< Dec 05 07:10:37 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 07:10:37 crc kubenswrapper[4863]: > Dec 05 07:10:38 crc kubenswrapper[4863]: I1205 07:10:38.433453 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 07:10:43 crc kubenswrapper[4863]: I1205 07:10:43.750694 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.318364 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-p4rsg"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.320029 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.322743 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.322972 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.335074 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p4rsg"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.440820 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8csbb\" (UniqueName: \"kubernetes.io/projected/b6b7f621-27c8-4603-9de2-23f2fcec007b-kube-api-access-8csbb\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.440892 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.441092 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-scripts\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.441122 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-config-data\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.460230 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.461675 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.467452 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.474796 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.502483 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.503837 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.508563 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.543414 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-scripts\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.543493 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxgbp\" (UniqueName: \"kubernetes.io/projected/c5397fb3-4823-4067-b49c-4852026100cc-kube-api-access-lxgbp\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.543517 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-config-data\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.543559 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8csbb\" (UniqueName: \"kubernetes.io/projected/b6b7f621-27c8-4603-9de2-23f2fcec007b-kube-api-access-8csbb\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.543593 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-config-data\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.543618 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.543669 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.552572 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.558310 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.564101 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-config-data\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.569904 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-scripts\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.576437 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8csbb\" (UniqueName: \"kubernetes.io/projected/b6b7f621-27c8-4603-9de2-23f2fcec007b-kube-api-access-8csbb\") pod \"nova-cell0-cell-mapping-p4rsg\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.576491 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.578105 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.587283 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.601399 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.633233 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.635213 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.641073 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.642248 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.645109 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-config-data\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.646966 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.646997 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.647096 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7p9d\" (UniqueName: \"kubernetes.io/projected/988c8543-e70f-4434-a6ec-8b2807600569-kube-api-access-m7p9d\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.647132 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-config-data\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.647158 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.647177 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.647270 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50fee71d-ec64-438b-952d-c718e64c9eb0-logs\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.649342 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.647332 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lrf6\" (UniqueName: \"kubernetes.io/projected/50fee71d-ec64-438b-952d-c718e64c9eb0-kube-api-access-9lrf6\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.653110 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxgbp\" (UniqueName: \"kubernetes.io/projected/c5397fb3-4823-4067-b49c-4852026100cc-kube-api-access-lxgbp\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.659380 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.677064 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-config-data\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.720132 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxgbp\" (UniqueName: \"kubernetes.io/projected/c5397fb3-4823-4067-b49c-4852026100cc-kube-api-access-lxgbp\") pod \"nova-scheduler-0\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.749685 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d75688ddc-hlnlx"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.751291 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.768775 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50fee71d-ec64-438b-952d-c718e64c9eb0-logs\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769132 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lrf6\" (UniqueName: \"kubernetes.io/projected/50fee71d-ec64-438b-952d-c718e64c9eb0-kube-api-access-9lrf6\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769197 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-logs\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769224 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-config-data\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769286 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769308 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769350 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x76nt\" (UniqueName: \"kubernetes.io/projected/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-kube-api-access-x76nt\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769391 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7p9d\" (UniqueName: \"kubernetes.io/projected/988c8543-e70f-4434-a6ec-8b2807600569-kube-api-access-m7p9d\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769427 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769456 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-config-data\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769513 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.769597 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50fee71d-ec64-438b-952d-c718e64c9eb0-logs\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.775235 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.780879 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.790275 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-config-data\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.790761 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.797249 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.801616 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d75688ddc-hlnlx"] Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.803029 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lrf6\" (UniqueName: \"kubernetes.io/projected/50fee71d-ec64-438b-952d-c718e64c9eb0-kube-api-access-9lrf6\") pod \"nova-api-0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " pod="openstack/nova-api-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.805973 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7p9d\" (UniqueName: \"kubernetes.io/projected/988c8543-e70f-4434-a6ec-8b2807600569-kube-api-access-m7p9d\") pod \"nova-cell1-novncproxy-0\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.847047 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871558 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-sb\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871611 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-logs\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871632 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-svc\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871661 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-config-data\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871695 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-swift-storage-0\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871763 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x76nt\" (UniqueName: \"kubernetes.io/projected/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-kube-api-access-x76nt\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871794 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871817 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5b4k\" (UniqueName: \"kubernetes.io/projected/685d51e1-55c0-4334-9e5c-fae1485c49ce-kube-api-access-l5b4k\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871870 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-nb\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " 
pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.871888 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-config\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.872555 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-logs\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.878023 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-config-data\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.881040 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.894571 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x76nt\" (UniqueName: \"kubernetes.io/projected/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-kube-api-access-x76nt\") pod \"nova-metadata-0\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " pod="openstack/nova-metadata-0" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.973100 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-sb\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.973162 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-svc\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.973222 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-swift-storage-0\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.973309 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5b4k\" (UniqueName: \"kubernetes.io/projected/685d51e1-55c0-4334-9e5c-fae1485c49ce-kube-api-access-l5b4k\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.973358 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-nb\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.973378 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-config\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.974236 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-config\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.974800 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-sb\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.975279 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-svc\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.976773 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-swift-storage-0\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.977287 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-nb\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:44 crc kubenswrapper[4863]: I1205 07:10:44.998228 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5b4k\" (UniqueName: \"kubernetes.io/projected/685d51e1-55c0-4334-9e5c-fae1485c49ce-kube-api-access-l5b4k\") pod \"dnsmasq-dns-7d75688ddc-hlnlx\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.096459 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.116053 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.123855 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.355544 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.366539 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:10:45 crc kubenswrapper[4863]: W1205 07:10:45.376663 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod988c8543_e70f_4434_a6ec_8b2807600569.slice/crio-fe4b21928b58e858b3da9590ed30c27f52d0238720d7309c9a3d3dc7a7db3560 WatchSource:0}: Error finding container fe4b21928b58e858b3da9590ed30c27f52d0238720d7309c9a3d3dc7a7db3560: Status 404 returned error can't find the container with id fe4b21928b58e858b3da9590ed30c27f52d0238720d7309c9a3d3dc7a7db3560 Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.378352 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p4rsg"] Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.472012 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"988c8543-e70f-4434-a6ec-8b2807600569","Type":"ContainerStarted","Data":"fe4b21928b58e858b3da9590ed30c27f52d0238720d7309c9a3d3dc7a7db3560"} Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.477096 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c5397fb3-4823-4067-b49c-4852026100cc","Type":"ContainerStarted","Data":"cf91566b5676cae42c6b0e5d783ec8a3dea8aa4719871ae6cac260816b785b35"} Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.479693 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p4rsg" event={"ID":"b6b7f621-27c8-4603-9de2-23f2fcec007b","Type":"ContainerStarted","Data":"2642300f8665495f58ef557f2d3334e67be58b66d1ca4dee89c2d56682deaee7"} Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.534847 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mdf66"] Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.536414 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.538389 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.538463 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.553330 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mdf66"] Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.631316 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.646628 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:10:45 crc kubenswrapper[4863]: W1205 07:10:45.654134 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50fee71d_ec64_438b_952d_c718e64c9eb0.slice/crio-977cbd78d2e8ae9cf49b93f79c0e709ae275025adb5f6699e0459fad069198b2 WatchSource:0}: Error finding container 977cbd78d2e8ae9cf49b93f79c0e709ae275025adb5f6699e0459fad069198b2: Status 404 returned error can't find the container with id 977cbd78d2e8ae9cf49b93f79c0e709ae275025adb5f6699e0459fad069198b2 Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.694207 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7szsf\" (UniqueName: \"kubernetes.io/projected/40cca709-0714-44d5-9105-02eab2284e98-kube-api-access-7szsf\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.694262 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-config-data\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.694524 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-scripts\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.694605 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.755520 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d75688ddc-hlnlx"] Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.800686 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7szsf\" (UniqueName: \"kubernetes.io/projected/40cca709-0714-44d5-9105-02eab2284e98-kube-api-access-7szsf\") pod 
\"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.800731 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-config-data\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.800851 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-scripts\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.800900 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.810687 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-scripts\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.810765 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-config-data\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.813137 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:45 crc kubenswrapper[4863]: I1205 07:10:45.818272 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7szsf\" (UniqueName: \"kubernetes.io/projected/40cca709-0714-44d5-9105-02eab2284e98-kube-api-access-7szsf\") pod \"nova-cell1-conductor-db-sync-mdf66\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.022245 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.515985 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p4rsg" event={"ID":"b6b7f621-27c8-4603-9de2-23f2fcec007b","Type":"ContainerStarted","Data":"7e4f44a5714c7ca375a4b222b0a503203afe6922968573a408f16f804259ed1f"} Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.520254 4863 generic.go:334] "Generic (PLEG): container finished" podID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerID="63f913ec80a2272783d213863d6c62095fe7c1e692569e26a845c7d932a6bd4d" exitCode=0 Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.520328 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" event={"ID":"685d51e1-55c0-4334-9e5c-fae1485c49ce","Type":"ContainerDied","Data":"63f913ec80a2272783d213863d6c62095fe7c1e692569e26a845c7d932a6bd4d"} Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.520357 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" event={"ID":"685d51e1-55c0-4334-9e5c-fae1485c49ce","Type":"ContainerStarted","Data":"f071127da3ed5d79f23b17bea16e0169a8a45c2dbbea91ce8c5662df38861fd1"} Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.525620 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9","Type":"ContainerStarted","Data":"281184e7076bfa21e3996e3baf6640f8407b40ce67fb9d64ce333d3422abbf8a"} Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.533800 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50fee71d-ec64-438b-952d-c718e64c9eb0","Type":"ContainerStarted","Data":"977cbd78d2e8ae9cf49b93f79c0e709ae275025adb5f6699e0459fad069198b2"} Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.542824 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mdf66"] Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.546361 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-p4rsg" podStartSLOduration=2.546342446 podStartE2EDuration="2.546342446s" podCreationTimestamp="2025-12-05 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:10:46.533764951 +0000 UTC m=+1474.259761991" watchObservedRunningTime="2025-12-05 07:10:46.546342446 +0000 UTC m=+1474.272339486" Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.568303 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.635168 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:46 crc kubenswrapper[4863]: I1205 07:10:46.818702 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zmhp9"] Dec 05 07:10:47 crc kubenswrapper[4863]: I1205 07:10:47.918733 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:10:47 crc kubenswrapper[4863]: I1205 07:10:47.930946 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:10:48 crc kubenswrapper[4863]: I1205 07:10:48.577365 4863 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zmhp9" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="registry-server" containerID="cri-o://176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d" gracePeriod=2 Dec 05 07:10:48 crc kubenswrapper[4863]: I1205 07:10:48.577841 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mdf66" event={"ID":"40cca709-0714-44d5-9105-02eab2284e98","Type":"ContainerStarted","Data":"2e4e8aeaef3032694ce56f66c582660833134a74a67fe9be83224df4d8739850"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.272756 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.370346 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-utilities\") pod \"b6295f62-24bc-4974-9a41-191497bfdd94\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.370496 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75vj7\" (UniqueName: \"kubernetes.io/projected/b6295f62-24bc-4974-9a41-191497bfdd94-kube-api-access-75vj7\") pod \"b6295f62-24bc-4974-9a41-191497bfdd94\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.370679 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-catalog-content\") pod \"b6295f62-24bc-4974-9a41-191497bfdd94\" (UID: \"b6295f62-24bc-4974-9a41-191497bfdd94\") " Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.372931 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-utilities" (OuterVolumeSpecName: "utilities") pod "b6295f62-24bc-4974-9a41-191497bfdd94" (UID: "b6295f62-24bc-4974-9a41-191497bfdd94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.377721 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6295f62-24bc-4974-9a41-191497bfdd94-kube-api-access-75vj7" (OuterVolumeSpecName: "kube-api-access-75vj7") pod "b6295f62-24bc-4974-9a41-191497bfdd94" (UID: "b6295f62-24bc-4974-9a41-191497bfdd94"). InnerVolumeSpecName "kube-api-access-75vj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.472688 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.472895 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75vj7\" (UniqueName: \"kubernetes.io/projected/b6295f62-24bc-4974-9a41-191497bfdd94-kube-api-access-75vj7\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.496682 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6295f62-24bc-4974-9a41-191497bfdd94" (UID: "b6295f62-24bc-4974-9a41-191497bfdd94"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.574665 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6295f62-24bc-4974-9a41-191497bfdd94-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.639755 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6295f62-24bc-4974-9a41-191497bfdd94" containerID="176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d" exitCode=0 Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.639863 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerDied","Data":"176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.639901 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmhp9" event={"ID":"b6295f62-24bc-4974-9a41-191497bfdd94","Type":"ContainerDied","Data":"b46b6f7126bd5edf661d6caea8d529a3fca7e4cbffae94f44243942ee54cce1a"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.639923 4863 scope.go:117] "RemoveContainer" containerID="176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.640090 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zmhp9" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.656742 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50fee71d-ec64-438b-952d-c718e64c9eb0","Type":"ContainerStarted","Data":"92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.669259 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mdf66" event={"ID":"40cca709-0714-44d5-9105-02eab2284e98","Type":"ContainerStarted","Data":"902837ddb220f8642d77255db0858d08e6f1e7216d770a851e390ec8b9f821e2"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.682140 4863 scope.go:117] "RemoveContainer" containerID="db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.682426 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" event={"ID":"685d51e1-55c0-4334-9e5c-fae1485c49ce","Type":"ContainerStarted","Data":"13c4e5fca364a92d11aea5e56ec5d1a0673050f112e7fb33ec84c4107f657efc"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.683255 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.701213 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"988c8543-e70f-4434-a6ec-8b2807600569","Type":"ContainerStarted","Data":"9858a46f6a9d4ccc200bc8f0f84044efc6b48e891e151f67f51229f593d907e9"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.701371 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="988c8543-e70f-4434-a6ec-8b2807600569" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://9858a46f6a9d4ccc200bc8f0f84044efc6b48e891e151f67f51229f593d907e9" gracePeriod=30 Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.704780 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.352754811 podStartE2EDuration="5.704761148s" podCreationTimestamp="2025-12-05 07:10:44 +0000 UTC" firstStartedPulling="2025-12-05 07:10:45.657530809 +0000 UTC m=+1473.383527849" lastFinishedPulling="2025-12-05 07:10:49.009537146 +0000 UTC m=+1476.735534186" observedRunningTime="2025-12-05 07:10:49.701017687 +0000 UTC m=+1477.427014727" watchObservedRunningTime="2025-12-05 07:10:49.704761148 +0000 UTC m=+1477.430758188" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.728710 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c5397fb3-4823-4067-b49c-4852026100cc","Type":"ContainerStarted","Data":"8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.740659 4863 scope.go:117] "RemoveContainer" containerID="9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.741201 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-mdf66" podStartSLOduration=4.741186521 podStartE2EDuration="4.741186521s" podCreationTimestamp="2025-12-05 07:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 07:10:49.737867791 +0000 UTC m=+1477.463864831" watchObservedRunningTime="2025-12-05 07:10:49.741186521 +0000 UTC m=+1477.467183561" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.755309 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9","Type":"ContainerStarted","Data":"93d81ea029817b6e05c4732b264044d60340481a3a2ed9d29aea197d9e630f16"} Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.755652 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-log" containerID="cri-o://93d81ea029817b6e05c4732b264044d60340481a3a2ed9d29aea197d9e630f16" gracePeriod=30 Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.755814 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-metadata" containerID="cri-o://c291ec8b231b1bf8479c85894d9b3d5c550bd86ae3e535a2639e6fa80c6d76fa" gracePeriod=30 Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.775562 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zmhp9"] Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.785420 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zmhp9"] Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.796225 4863 scope.go:117] "RemoveContainer" containerID="176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.796393 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 07:10:49 crc kubenswrapper[4863]: E1205 07:10:49.796703 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d\": container with ID starting with 176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d not found: ID does not exist" containerID="176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.796746 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d"} err="failed to get container status \"176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d\": rpc error: code = NotFound desc = could not find container \"176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d\": container with ID starting with 176facec5394932901c10cb552f9c7801bc4f1987a556e6a68c55581f96bb66d not found: ID does not exist" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.796773 4863 scope.go:117] "RemoveContainer" containerID="db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4" Dec 05 07:10:49 crc kubenswrapper[4863]: E1205 07:10:49.797040 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4\": container with ID starting with db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4 not found: ID does not exist" 
containerID="db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.797070 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4"} err="failed to get container status \"db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4\": rpc error: code = NotFound desc = could not find container \"db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4\": container with ID starting with db9b1bd4870eba46f640e2f07f704b96470cba661aee1304d0ad255d9e8d5da4 not found: ID does not exist" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.797087 4863 scope.go:117] "RemoveContainer" containerID="9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded" Dec 05 07:10:49 crc kubenswrapper[4863]: E1205 07:10:49.797361 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded\": container with ID starting with 9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded not found: ID does not exist" containerID="9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.797403 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded"} err="failed to get container status \"9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded\": rpc error: code = NotFound desc = could not find container \"9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded\": container with ID starting with 9d6b906de6c272f7b021b53d214b8f2c188a8f36919dd7e4bc2862a705b17ded not found: ID does not exist" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.821034 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" podStartSLOduration=5.821013798 podStartE2EDuration="5.821013798s" podCreationTimestamp="2025-12-05 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:10:49.82029815 +0000 UTC m=+1477.546295190" watchObservedRunningTime="2025-12-05 07:10:49.821013798 +0000 UTC m=+1477.547010838" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.840960 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.210333746 podStartE2EDuration="5.840940431s" podCreationTimestamp="2025-12-05 07:10:44 +0000 UTC" firstStartedPulling="2025-12-05 07:10:45.378559463 +0000 UTC m=+1473.104556503" lastFinishedPulling="2025-12-05 07:10:49.009166148 +0000 UTC m=+1476.735163188" observedRunningTime="2025-12-05 07:10:49.835991591 +0000 UTC m=+1477.561988631" watchObservedRunningTime="2025-12-05 07:10:49.840940431 +0000 UTC m=+1477.566937471" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.848090 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.857181 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.255095692 podStartE2EDuration="5.857158234s" podCreationTimestamp="2025-12-05 07:10:44 +0000 
UTC" firstStartedPulling="2025-12-05 07:10:45.360180937 +0000 UTC m=+1473.086177967" lastFinishedPulling="2025-12-05 07:10:48.962243469 +0000 UTC m=+1476.688240509" observedRunningTime="2025-12-05 07:10:49.853102705 +0000 UTC m=+1477.579099765" watchObservedRunningTime="2025-12-05 07:10:49.857158234 +0000 UTC m=+1477.583155264" Dec 05 07:10:49 crc kubenswrapper[4863]: I1205 07:10:49.880615 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.51937267 podStartE2EDuration="5.880594062s" podCreationTimestamp="2025-12-05 07:10:44 +0000 UTC" firstStartedPulling="2025-12-05 07:10:45.649433722 +0000 UTC m=+1473.375430762" lastFinishedPulling="2025-12-05 07:10:49.010655114 +0000 UTC m=+1476.736652154" observedRunningTime="2025-12-05 07:10:49.877487127 +0000 UTC m=+1477.603484167" watchObservedRunningTime="2025-12-05 07:10:49.880594062 +0000 UTC m=+1477.606591102" Dec 05 07:10:50 crc kubenswrapper[4863]: I1205 07:10:50.116106 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 07:10:50 crc kubenswrapper[4863]: I1205 07:10:50.116204 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 07:10:50 crc kubenswrapper[4863]: I1205 07:10:50.615025 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" path="/var/lib/kubelet/pods/b6295f62-24bc-4974-9a41-191497bfdd94/volumes" Dec 05 07:10:50 crc kubenswrapper[4863]: I1205 07:10:50.766983 4863 generic.go:334] "Generic (PLEG): container finished" podID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerID="93d81ea029817b6e05c4732b264044d60340481a3a2ed9d29aea197d9e630f16" exitCode=143 Dec 05 07:10:50 crc kubenswrapper[4863]: I1205 07:10:50.767063 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9","Type":"ContainerDied","Data":"93d81ea029817b6e05c4732b264044d60340481a3a2ed9d29aea197d9e630f16"} Dec 05 07:10:50 crc kubenswrapper[4863]: I1205 07:10:50.767089 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9","Type":"ContainerStarted","Data":"c291ec8b231b1bf8479c85894d9b3d5c550bd86ae3e535a2639e6fa80c6d76fa"} Dec 05 07:10:50 crc kubenswrapper[4863]: I1205 07:10:50.770756 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50fee71d-ec64-438b-952d-c718e64c9eb0","Type":"ContainerStarted","Data":"fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf"} Dec 05 07:10:53 crc kubenswrapper[4863]: I1205 07:10:53.802715 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6b7f621-27c8-4603-9de2-23f2fcec007b" containerID="7e4f44a5714c7ca375a4b222b0a503203afe6922968573a408f16f804259ed1f" exitCode=0 Dec 05 07:10:53 crc kubenswrapper[4863]: I1205 07:10:53.803199 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p4rsg" event={"ID":"b6b7f621-27c8-4603-9de2-23f2fcec007b","Type":"ContainerDied","Data":"7e4f44a5714c7ca375a4b222b0a503203afe6922968573a408f16f804259ed1f"} Dec 05 07:10:54 crc kubenswrapper[4863]: I1205 07:10:54.791317 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 07:10:54 crc kubenswrapper[4863]: I1205 07:10:54.829454 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/nova-scheduler-0" Dec 05 07:10:54 crc kubenswrapper[4863]: I1205 07:10:54.865364 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.099583 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.101548 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.127106 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.192995 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77d8c9c7-tz6lg"] Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.193233 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" podUID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerName="dnsmasq-dns" containerID="cri-o://a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37" gracePeriod=10 Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.220699 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.294208 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8csbb\" (UniqueName: \"kubernetes.io/projected/b6b7f621-27c8-4603-9de2-23f2fcec007b-kube-api-access-8csbb\") pod \"b6b7f621-27c8-4603-9de2-23f2fcec007b\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.294335 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-config-data\") pod \"b6b7f621-27c8-4603-9de2-23f2fcec007b\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.294491 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-scripts\") pod \"b6b7f621-27c8-4603-9de2-23f2fcec007b\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.294522 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-combined-ca-bundle\") pod \"b6b7f621-27c8-4603-9de2-23f2fcec007b\" (UID: \"b6b7f621-27c8-4603-9de2-23f2fcec007b\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.325681 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6b7f621-27c8-4603-9de2-23f2fcec007b-kube-api-access-8csbb" (OuterVolumeSpecName: "kube-api-access-8csbb") pod "b6b7f621-27c8-4603-9de2-23f2fcec007b" (UID: "b6b7f621-27c8-4603-9de2-23f2fcec007b"). InnerVolumeSpecName "kube-api-access-8csbb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.327668 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-scripts" (OuterVolumeSpecName: "scripts") pod "b6b7f621-27c8-4603-9de2-23f2fcec007b" (UID: "b6b7f621-27c8-4603-9de2-23f2fcec007b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.349777 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6b7f621-27c8-4603-9de2-23f2fcec007b" (UID: "b6b7f621-27c8-4603-9de2-23f2fcec007b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.365708 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-config-data" (OuterVolumeSpecName: "config-data") pod "b6b7f621-27c8-4603-9de2-23f2fcec007b" (UID: "b6b7f621-27c8-4603-9de2-23f2fcec007b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.396814 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.396855 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.396872 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8csbb\" (UniqueName: \"kubernetes.io/projected/b6b7f621-27c8-4603-9de2-23f2fcec007b-kube-api-access-8csbb\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.396882 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b7f621-27c8-4603-9de2-23f2fcec007b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.650803 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.702676 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b76p2\" (UniqueName: \"kubernetes.io/projected/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-kube-api-access-b76p2\") pod \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.702793 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-nb\") pod \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.702882 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-swift-storage-0\") pod \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.702905 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-config\") pod \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.702960 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-sb\") pod \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.703139 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-svc\") pod \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\" (UID: \"d69b9a69-5339-4c6d-ab1c-1e390d959e6f\") " Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.713668 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-kube-api-access-b76p2" (OuterVolumeSpecName: "kube-api-access-b76p2") pod "d69b9a69-5339-4c6d-ab1c-1e390d959e6f" (UID: "d69b9a69-5339-4c6d-ab1c-1e390d959e6f"). InnerVolumeSpecName "kube-api-access-b76p2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.760531 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d69b9a69-5339-4c6d-ab1c-1e390d959e6f" (UID: "d69b9a69-5339-4c6d-ab1c-1e390d959e6f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.762408 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d69b9a69-5339-4c6d-ab1c-1e390d959e6f" (UID: "d69b9a69-5339-4c6d-ab1c-1e390d959e6f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.767996 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d69b9a69-5339-4c6d-ab1c-1e390d959e6f" (UID: "d69b9a69-5339-4c6d-ab1c-1e390d959e6f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.784902 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-config" (OuterVolumeSpecName: "config") pod "d69b9a69-5339-4c6d-ab1c-1e390d959e6f" (UID: "d69b9a69-5339-4c6d-ab1c-1e390d959e6f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.795106 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d69b9a69-5339-4c6d-ab1c-1e390d959e6f" (UID: "d69b9a69-5339-4c6d-ab1c-1e390d959e6f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.805087 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.805121 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.805134 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.805144 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.805156 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.805166 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b76p2\" (UniqueName: \"kubernetes.io/projected/d69b9a69-5339-4c6d-ab1c-1e390d959e6f-kube-api-access-b76p2\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.829114 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p4rsg" event={"ID":"b6b7f621-27c8-4603-9de2-23f2fcec007b","Type":"ContainerDied","Data":"2642300f8665495f58ef557f2d3334e67be58b66d1ca4dee89c2d56682deaee7"} Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.829158 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2642300f8665495f58ef557f2d3334e67be58b66d1ca4dee89c2d56682deaee7" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 
07:10:55.829156 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p4rsg" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.831426 4863 generic.go:334] "Generic (PLEG): container finished" podID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerID="a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37" exitCode=0 Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.831463 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.831520 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" event={"ID":"d69b9a69-5339-4c6d-ab1c-1e390d959e6f","Type":"ContainerDied","Data":"a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37"} Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.831586 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8c9c7-tz6lg" event={"ID":"d69b9a69-5339-4c6d-ab1c-1e390d959e6f","Type":"ContainerDied","Data":"d197009deb0ad263019d16d986a81c918ac8eaeb135021e5afffefc8460fc1f5"} Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.831607 4863 scope.go:117] "RemoveContainer" containerID="a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.878907 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77d8c9c7-tz6lg"] Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.881041 4863 scope.go:117] "RemoveContainer" containerID="8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.888810 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77d8c9c7-tz6lg"] Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.905949 4863 scope.go:117] "RemoveContainer" containerID="a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37" Dec 05 07:10:55 crc kubenswrapper[4863]: E1205 07:10:55.906445 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37\": container with ID starting with a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37 not found: ID does not exist" containerID="a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.906522 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37"} err="failed to get container status \"a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37\": rpc error: code = NotFound desc = could not find container \"a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37\": container with ID starting with a757fea6554d6f00ab05b2acde85036c74a39aed38dd6bdefeb8cda23e42bf37 not found: ID does not exist" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.906576 4863 scope.go:117] "RemoveContainer" containerID="8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785" Dec 05 07:10:55 crc kubenswrapper[4863]: E1205 07:10:55.906959 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785\": container with ID starting with 8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785 not found: ID does not exist" containerID="8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785" Dec 05 07:10:55 crc kubenswrapper[4863]: I1205 07:10:55.907023 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785"} err="failed to get container status \"8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785\": rpc error: code = NotFound desc = could not find container \"8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785\": container with ID starting with 8fd213f76f67b4bc396b44d1c48b2777694f8756519f99645e9d9cf0c4eff785 not found: ID does not exist" Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.039666 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.050065 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.181730 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.182087 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.613697 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" path="/var/lib/kubelet/pods/d69b9a69-5339-4c6d-ab1c-1e390d959e6f/volumes" Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.843090 4863 generic.go:334] "Generic (PLEG): container finished" podID="40cca709-0714-44d5-9105-02eab2284e98" containerID="902837ddb220f8642d77255db0858d08e6f1e7216d770a851e390ec8b9f821e2" exitCode=0 Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.843205 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mdf66" event={"ID":"40cca709-0714-44d5-9105-02eab2284e98","Type":"ContainerDied","Data":"902837ddb220f8642d77255db0858d08e6f1e7216d770a851e390ec8b9f821e2"} Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.845536 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-log" containerID="cri-o://92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b" gracePeriod=30 Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.845914 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c5397fb3-4823-4067-b49c-4852026100cc" containerName="nova-scheduler-scheduler" containerID="cri-o://8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" gracePeriod=30 Dec 05 07:10:56 crc kubenswrapper[4863]: I1205 07:10:56.845999 4863 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-api-0" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-api" containerID="cri-o://fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf" gracePeriod=30 Dec 05 07:10:57 crc kubenswrapper[4863]: I1205 07:10:57.857976 4863 generic.go:334] "Generic (PLEG): container finished" podID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerID="92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b" exitCode=143 Dec 05 07:10:57 crc kubenswrapper[4863]: I1205 07:10:57.858070 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50fee71d-ec64-438b-952d-c718e64c9eb0","Type":"ContainerDied","Data":"92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b"} Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.184748 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.251745 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-config-data\") pod \"40cca709-0714-44d5-9105-02eab2284e98\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.251919 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7szsf\" (UniqueName: \"kubernetes.io/projected/40cca709-0714-44d5-9105-02eab2284e98-kube-api-access-7szsf\") pod \"40cca709-0714-44d5-9105-02eab2284e98\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.252026 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-combined-ca-bundle\") pod \"40cca709-0714-44d5-9105-02eab2284e98\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.252060 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-scripts\") pod \"40cca709-0714-44d5-9105-02eab2284e98\" (UID: \"40cca709-0714-44d5-9105-02eab2284e98\") " Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.258555 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40cca709-0714-44d5-9105-02eab2284e98-kube-api-access-7szsf" (OuterVolumeSpecName: "kube-api-access-7szsf") pod "40cca709-0714-44d5-9105-02eab2284e98" (UID: "40cca709-0714-44d5-9105-02eab2284e98"). InnerVolumeSpecName "kube-api-access-7szsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.258956 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-scripts" (OuterVolumeSpecName: "scripts") pod "40cca709-0714-44d5-9105-02eab2284e98" (UID: "40cca709-0714-44d5-9105-02eab2284e98"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.284413 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-config-data" (OuterVolumeSpecName: "config-data") pod "40cca709-0714-44d5-9105-02eab2284e98" (UID: "40cca709-0714-44d5-9105-02eab2284e98"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.286118 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40cca709-0714-44d5-9105-02eab2284e98" (UID: "40cca709-0714-44d5-9105-02eab2284e98"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.354760 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.354798 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7szsf\" (UniqueName: \"kubernetes.io/projected/40cca709-0714-44d5-9105-02eab2284e98-kube-api-access-7szsf\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.354808 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.354818 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40cca709-0714-44d5-9105-02eab2284e98-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.874094 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mdf66" event={"ID":"40cca709-0714-44d5-9105-02eab2284e98","Type":"ContainerDied","Data":"2e4e8aeaef3032694ce56f66c582660833134a74a67fe9be83224df4d8739850"} Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.875459 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e4e8aeaef3032694ce56f66c582660833134a74a67fe9be83224df4d8739850" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.874237 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mdf66" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.930266 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 07:10:58 crc kubenswrapper[4863]: E1205 07:10:58.931195 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="extract-content" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931219 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="extract-content" Dec 05 07:10:58 crc kubenswrapper[4863]: E1205 07:10:58.931236 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="extract-utilities" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931244 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="extract-utilities" Dec 05 07:10:58 crc kubenswrapper[4863]: E1205 07:10:58.931262 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="registry-server" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931270 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="registry-server" Dec 05 07:10:58 crc kubenswrapper[4863]: E1205 07:10:58.931295 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerName="init" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931302 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerName="init" Dec 05 07:10:58 crc kubenswrapper[4863]: E1205 07:10:58.931317 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerName="dnsmasq-dns" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931323 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerName="dnsmasq-dns" Dec 05 07:10:58 crc kubenswrapper[4863]: E1205 07:10:58.931332 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6b7f621-27c8-4603-9de2-23f2fcec007b" containerName="nova-manage" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931337 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6b7f621-27c8-4603-9de2-23f2fcec007b" containerName="nova-manage" Dec 05 07:10:58 crc kubenswrapper[4863]: E1205 07:10:58.931350 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40cca709-0714-44d5-9105-02eab2284e98" containerName="nova-cell1-conductor-db-sync" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931355 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="40cca709-0714-44d5-9105-02eab2284e98" containerName="nova-cell1-conductor-db-sync" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931556 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6b7f621-27c8-4603-9de2-23f2fcec007b" containerName="nova-manage" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931582 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6295f62-24bc-4974-9a41-191497bfdd94" containerName="registry-server" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931601 4863 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="40cca709-0714-44d5-9105-02eab2284e98" containerName="nova-cell1-conductor-db-sync" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.931620 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d69b9a69-5339-4c6d-ab1c-1e390d959e6f" containerName="dnsmasq-dns" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.932341 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.934452 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 07:10:58 crc kubenswrapper[4863]: I1205 07:10:58.944247 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.067424 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.067855 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k556n\" (UniqueName: \"kubernetes.io/projected/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-kube-api-access-k556n\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.067958 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.170129 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.170171 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k556n\" (UniqueName: \"kubernetes.io/projected/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-kube-api-access-k556n\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.170194 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.174560 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc 
kubenswrapper[4863]: I1205 07:10:59.175375 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.188065 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k556n\" (UniqueName: \"kubernetes.io/projected/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-kube-api-access-k556n\") pod \"nova-cell1-conductor-0\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.258919 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.739050 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 07:10:59 crc kubenswrapper[4863]: E1205 07:10:59.793759 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:10:59 crc kubenswrapper[4863]: E1205 07:10:59.797731 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:10:59 crc kubenswrapper[4863]: E1205 07:10:59.799760 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:10:59 crc kubenswrapper[4863]: E1205 07:10:59.799798 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c5397fb3-4823-4067-b49c-4852026100cc" containerName="nova-scheduler-scheduler" Dec 05 07:10:59 crc kubenswrapper[4863]: I1205 07:10:59.884605 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2","Type":"ContainerStarted","Data":"271765f99b986d264d9c40cd718d08fc4a22ef62ec3a210144c4a50ea66e096b"} Dec 05 07:11:00 crc kubenswrapper[4863]: I1205 07:11:00.900533 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2","Type":"ContainerStarted","Data":"5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758"} Dec 05 07:11:00 crc kubenswrapper[4863]: I1205 07:11:00.902111 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 07:11:00 crc kubenswrapper[4863]: I1205 07:11:00.929862 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.929831585 podStartE2EDuration="2.929831585s" podCreationTimestamp="2025-12-05 07:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:00.917429405 +0000 UTC m=+1488.643426455" watchObservedRunningTime="2025-12-05 07:11:00.929831585 +0000 UTC m=+1488.655828655" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.429600 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.520424 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-config-data\") pod \"c5397fb3-4823-4067-b49c-4852026100cc\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.520605 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxgbp\" (UniqueName: \"kubernetes.io/projected/c5397fb3-4823-4067-b49c-4852026100cc-kube-api-access-lxgbp\") pod \"c5397fb3-4823-4067-b49c-4852026100cc\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.520699 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-combined-ca-bundle\") pod \"c5397fb3-4823-4067-b49c-4852026100cc\" (UID: \"c5397fb3-4823-4067-b49c-4852026100cc\") " Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.530965 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5397fb3-4823-4067-b49c-4852026100cc-kube-api-access-lxgbp" (OuterVolumeSpecName: "kube-api-access-lxgbp") pod "c5397fb3-4823-4067-b49c-4852026100cc" (UID: "c5397fb3-4823-4067-b49c-4852026100cc"). InnerVolumeSpecName "kube-api-access-lxgbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.566209 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-config-data" (OuterVolumeSpecName: "config-data") pod "c5397fb3-4823-4067-b49c-4852026100cc" (UID: "c5397fb3-4823-4067-b49c-4852026100cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.576826 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5397fb3-4823-4067-b49c-4852026100cc" (UID: "c5397fb3-4823-4067-b49c-4852026100cc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.622621 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxgbp\" (UniqueName: \"kubernetes.io/projected/c5397fb3-4823-4067-b49c-4852026100cc-kube-api-access-lxgbp\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.622653 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.622662 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5397fb3-4823-4067-b49c-4852026100cc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.658285 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.723593 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-config-data\") pod \"50fee71d-ec64-438b-952d-c718e64c9eb0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.723716 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lrf6\" (UniqueName: \"kubernetes.io/projected/50fee71d-ec64-438b-952d-c718e64c9eb0-kube-api-access-9lrf6\") pod \"50fee71d-ec64-438b-952d-c718e64c9eb0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.723737 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-combined-ca-bundle\") pod \"50fee71d-ec64-438b-952d-c718e64c9eb0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.723788 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50fee71d-ec64-438b-952d-c718e64c9eb0-logs\") pod \"50fee71d-ec64-438b-952d-c718e64c9eb0\" (UID: \"50fee71d-ec64-438b-952d-c718e64c9eb0\") " Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.724302 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50fee71d-ec64-438b-952d-c718e64c9eb0-logs" (OuterVolumeSpecName: "logs") pod "50fee71d-ec64-438b-952d-c718e64c9eb0" (UID: "50fee71d-ec64-438b-952d-c718e64c9eb0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.727743 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50fee71d-ec64-438b-952d-c718e64c9eb0-kube-api-access-9lrf6" (OuterVolumeSpecName: "kube-api-access-9lrf6") pod "50fee71d-ec64-438b-952d-c718e64c9eb0" (UID: "50fee71d-ec64-438b-952d-c718e64c9eb0"). InnerVolumeSpecName "kube-api-access-9lrf6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.745448 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50fee71d-ec64-438b-952d-c718e64c9eb0" (UID: "50fee71d-ec64-438b-952d-c718e64c9eb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.751938 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-config-data" (OuterVolumeSpecName: "config-data") pod "50fee71d-ec64-438b-952d-c718e64c9eb0" (UID: "50fee71d-ec64-438b-952d-c718e64c9eb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.825651 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.825684 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lrf6\" (UniqueName: \"kubernetes.io/projected/50fee71d-ec64-438b-952d-c718e64c9eb0-kube-api-access-9lrf6\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.825700 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50fee71d-ec64-438b-952d-c718e64c9eb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.825714 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50fee71d-ec64-438b-952d-c718e64c9eb0-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.909225 4863 generic.go:334] "Generic (PLEG): container finished" podID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerID="fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf" exitCode=0 Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.909285 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.909296 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50fee71d-ec64-438b-952d-c718e64c9eb0","Type":"ContainerDied","Data":"fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf"} Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.909333 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50fee71d-ec64-438b-952d-c718e64c9eb0","Type":"ContainerDied","Data":"977cbd78d2e8ae9cf49b93f79c0e709ae275025adb5f6699e0459fad069198b2"} Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.909354 4863 scope.go:117] "RemoveContainer" containerID="fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.911919 4863 generic.go:334] "Generic (PLEG): container finished" podID="c5397fb3-4823-4067-b49c-4852026100cc" containerID="8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" exitCode=0 Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.912003 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.912613 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c5397fb3-4823-4067-b49c-4852026100cc","Type":"ContainerDied","Data":"8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c"} Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.912640 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c5397fb3-4823-4067-b49c-4852026100cc","Type":"ContainerDied","Data":"cf91566b5676cae42c6b0e5d783ec8a3dea8aa4719871ae6cac260816b785b35"} Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.933720 4863 scope.go:117] "RemoveContainer" containerID="92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.960776 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.968748 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.977454 4863 scope.go:117] "RemoveContainer" containerID="fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf" Dec 05 07:11:01 crc kubenswrapper[4863]: E1205 07:11:01.977982 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf\": container with ID starting with fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf not found: ID does not exist" containerID="fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.978035 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf"} err="failed to get container status \"fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf\": rpc error: code = NotFound desc = could not find container \"fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf\": container with ID starting with fd2a3154b511345edfb5289aba3b5a71ec54fe60fdc606dfee13bed061bb18bf not found: ID does not 
exist" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.978070 4863 scope.go:117] "RemoveContainer" containerID="92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.978451 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:01 crc kubenswrapper[4863]: E1205 07:11:01.978539 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b\": container with ID starting with 92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b not found: ID does not exist" containerID="92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.978588 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b"} err="failed to get container status \"92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b\": rpc error: code = NotFound desc = could not find container \"92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b\": container with ID starting with 92641c51c3eef0c32c8cd174207eb71bd7959ce14c5c88f45978a6387815825b not found: ID does not exist" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.978628 4863 scope.go:117] "RemoveContainer" containerID="8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" Dec 05 07:11:01 crc kubenswrapper[4863]: I1205 07:11:01.998365 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.009346 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:02 crc kubenswrapper[4863]: E1205 07:11:02.010157 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5397fb3-4823-4067-b49c-4852026100cc" containerName="nova-scheduler-scheduler" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.010183 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5397fb3-4823-4067-b49c-4852026100cc" containerName="nova-scheduler-scheduler" Dec 05 07:11:02 crc kubenswrapper[4863]: E1205 07:11:02.010223 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-api" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.010232 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-api" Dec 05 07:11:02 crc kubenswrapper[4863]: E1205 07:11:02.010283 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-log" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.010293 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-log" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.010780 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-log" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.010818 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5397fb3-4823-4067-b49c-4852026100cc" containerName="nova-scheduler-scheduler" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.010848 4863 
memory_manager.go:354] "RemoveStaleState removing state" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" containerName="nova-api-api" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.012796 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.019893 4863 scope.go:117] "RemoveContainer" containerID="8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" Dec 05 07:11:02 crc kubenswrapper[4863]: E1205 07:11:02.020336 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c\": container with ID starting with 8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c not found: ID does not exist" containerID="8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.020376 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c"} err="failed to get container status \"8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c\": rpc error: code = NotFound desc = could not find container \"8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c\": container with ID starting with 8a63b9df4cbfd9d407e4f9a1749c3f48c5fe2af2d803d540f9df08859ec3805c not found: ID does not exist" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.020510 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.022881 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.036613 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.038118 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.040857 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.054888 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.140831 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq6xp\" (UniqueName: \"kubernetes.io/projected/495b8866-3ef7-48ae-b76e-a8313f08c48f-kube-api-access-nq6xp\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.140894 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.140927 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.140985 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/495b8866-3ef7-48ae-b76e-a8313f08c48f-logs\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.141037 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-config-data\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.141085 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4ffl\" (UniqueName: \"kubernetes.io/projected/e322902d-6cc2-4ff1-a549-08380d7e6010-kube-api-access-j4ffl\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.141102 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-config-data\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.242456 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4ffl\" (UniqueName: \"kubernetes.io/projected/e322902d-6cc2-4ff1-a549-08380d7e6010-kube-api-access-j4ffl\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.242519 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-config-data\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.242551 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq6xp\" (UniqueName: \"kubernetes.io/projected/495b8866-3ef7-48ae-b76e-a8313f08c48f-kube-api-access-nq6xp\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.242597 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.242628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.242660 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/495b8866-3ef7-48ae-b76e-a8313f08c48f-logs\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.242711 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-config-data\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.243460 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/495b8866-3ef7-48ae-b76e-a8313f08c48f-logs\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.247462 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-config-data\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.247796 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.247915 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-config-data\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.257089 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.259642 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4ffl\" (UniqueName: \"kubernetes.io/projected/e322902d-6cc2-4ff1-a549-08380d7e6010-kube-api-access-j4ffl\") pod \"nova-scheduler-0\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.260632 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq6xp\" (UniqueName: \"kubernetes.io/projected/495b8866-3ef7-48ae-b76e-a8313f08c48f-kube-api-access-nq6xp\") pod \"nova-api-0\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.338112 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.360575 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.613217 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50fee71d-ec64-438b-952d-c718e64c9eb0" path="/var/lib/kubelet/pods/50fee71d-ec64-438b-952d-c718e64c9eb0/volumes" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.613990 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5397fb3-4823-4067-b49c-4852026100cc" path="/var/lib/kubelet/pods/c5397fb3-4823-4067-b49c-4852026100cc/volumes" Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.827668 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:02 crc kubenswrapper[4863]: W1205 07:11:02.905557 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode322902d_6cc2_4ff1_a549_08380d7e6010.slice/crio-3fefa43640ed7c5f349aae84b5649fd9229c01fe93dd9b10b229635d0df55c35 WatchSource:0}: Error finding container 3fefa43640ed7c5f349aae84b5649fd9229c01fe93dd9b10b229635d0df55c35: Status 404 returned error can't find the container with id 3fefa43640ed7c5f349aae84b5649fd9229c01fe93dd9b10b229635d0df55c35 Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.908994 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.922108 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e322902d-6cc2-4ff1-a549-08380d7e6010","Type":"ContainerStarted","Data":"3fefa43640ed7c5f349aae84b5649fd9229c01fe93dd9b10b229635d0df55c35"} Dec 05 07:11:02 crc kubenswrapper[4863]: I1205 07:11:02.926687 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"495b8866-3ef7-48ae-b76e-a8313f08c48f","Type":"ContainerStarted","Data":"438b783c92200316261e417f5c4149eb3edc6c16de8a46ab86d5bb6fff48abcf"} Dec 05 07:11:03 crc kubenswrapper[4863]: I1205 07:11:03.941273 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e322902d-6cc2-4ff1-a549-08380d7e6010","Type":"ContainerStarted","Data":"a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9"} Dec 05 07:11:03 crc kubenswrapper[4863]: I1205 
07:11:03.951164 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"495b8866-3ef7-48ae-b76e-a8313f08c48f","Type":"ContainerStarted","Data":"590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f"} Dec 05 07:11:03 crc kubenswrapper[4863]: I1205 07:11:03.951208 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"495b8866-3ef7-48ae-b76e-a8313f08c48f","Type":"ContainerStarted","Data":"2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c"} Dec 05 07:11:03 crc kubenswrapper[4863]: I1205 07:11:03.978036 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.9780035639999998 podStartE2EDuration="2.978003564s" podCreationTimestamp="2025-12-05 07:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:03.959022264 +0000 UTC m=+1491.685019334" watchObservedRunningTime="2025-12-05 07:11:03.978003564 +0000 UTC m=+1491.704000614" Dec 05 07:11:03 crc kubenswrapper[4863]: I1205 07:11:03.997610 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.997570529 podStartE2EDuration="2.997570529s" podCreationTimestamp="2025-12-05 07:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:03.982123574 +0000 UTC m=+1491.708120624" watchObservedRunningTime="2025-12-05 07:11:03.997570529 +0000 UTC m=+1491.723567569" Dec 05 07:11:04 crc kubenswrapper[4863]: I1205 07:11:04.293204 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 07:11:07 crc kubenswrapper[4863]: I1205 07:11:07.361938 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 07:11:08 crc kubenswrapper[4863]: I1205 07:11:08.463893 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:11:08 crc kubenswrapper[4863]: I1205 07:11:08.464014 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:11:12 crc kubenswrapper[4863]: I1205 07:11:12.338374 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:11:12 crc kubenswrapper[4863]: I1205 07:11:12.338855 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:11:12 crc kubenswrapper[4863]: I1205 07:11:12.362082 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 07:11:12 crc kubenswrapper[4863]: I1205 07:11:12.394435 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 07:11:13 crc kubenswrapper[4863]: I1205 07:11:13.055951 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/nova-scheduler-0" Dec 05 07:11:13 crc kubenswrapper[4863]: I1205 07:11:13.420657 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:13 crc kubenswrapper[4863]: I1205 07:11:13.420665 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:20 crc kubenswrapper[4863]: E1205 07:11:20.024683 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ae2723a_9d30_47a4_8a91_c7b0c713bbd9.slice/crio-conmon-c291ec8b231b1bf8479c85894d9b3d5c550bd86ae3e535a2639e6fa80c6d76fa.scope\": RecentStats: unable to find data in memory cache]" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.103220 4863 generic.go:334] "Generic (PLEG): container finished" podID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerID="c291ec8b231b1bf8479c85894d9b3d5c550bd86ae3e535a2639e6fa80c6d76fa" exitCode=137 Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.103293 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9","Type":"ContainerDied","Data":"c291ec8b231b1bf8479c85894d9b3d5c550bd86ae3e535a2639e6fa80c6d76fa"} Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.104851 4863 generic.go:334] "Generic (PLEG): container finished" podID="988c8543-e70f-4434-a6ec-8b2807600569" containerID="9858a46f6a9d4ccc200bc8f0f84044efc6b48e891e151f67f51229f593d907e9" exitCode=137 Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.104885 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"988c8543-e70f-4434-a6ec-8b2807600569","Type":"ContainerDied","Data":"9858a46f6a9d4ccc200bc8f0f84044efc6b48e891e151f67f51229f593d907e9"} Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.255782 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.405820 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-logs\") pod \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.405974 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-combined-ca-bundle\") pod \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.406014 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-config-data\") pod \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.406100 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x76nt\" (UniqueName: \"kubernetes.io/projected/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-kube-api-access-x76nt\") pod \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\" (UID: \"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9\") " Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.406388 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-logs" (OuterVolumeSpecName: "logs") pod "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" (UID: "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.407059 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.411765 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-kube-api-access-x76nt" (OuterVolumeSpecName: "kube-api-access-x76nt") pod "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" (UID: "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9"). InnerVolumeSpecName "kube-api-access-x76nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.478895 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-config-data" (OuterVolumeSpecName: "config-data") pod "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" (UID: "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.489915 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" (UID: "8ae2723a-9d30-47a4-8a91-c7b0c713bbd9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.508540 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.508584 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.508602 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x76nt\" (UniqueName: \"kubernetes.io/projected/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9-kube-api-access-x76nt\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.592901 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.711311 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-config-data\") pod \"988c8543-e70f-4434-a6ec-8b2807600569\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.711370 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7p9d\" (UniqueName: \"kubernetes.io/projected/988c8543-e70f-4434-a6ec-8b2807600569-kube-api-access-m7p9d\") pod \"988c8543-e70f-4434-a6ec-8b2807600569\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.711447 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-combined-ca-bundle\") pod \"988c8543-e70f-4434-a6ec-8b2807600569\" (UID: \"988c8543-e70f-4434-a6ec-8b2807600569\") " Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.715593 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/988c8543-e70f-4434-a6ec-8b2807600569-kube-api-access-m7p9d" (OuterVolumeSpecName: "kube-api-access-m7p9d") pod "988c8543-e70f-4434-a6ec-8b2807600569" (UID: "988c8543-e70f-4434-a6ec-8b2807600569"). InnerVolumeSpecName "kube-api-access-m7p9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.740275 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-config-data" (OuterVolumeSpecName: "config-data") pod "988c8543-e70f-4434-a6ec-8b2807600569" (UID: "988c8543-e70f-4434-a6ec-8b2807600569"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.746405 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "988c8543-e70f-4434-a6ec-8b2807600569" (UID: "988c8543-e70f-4434-a6ec-8b2807600569"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.813420 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.813567 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/988c8543-e70f-4434-a6ec-8b2807600569-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:20 crc kubenswrapper[4863]: I1205 07:11:20.813580 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7p9d\" (UniqueName: \"kubernetes.io/projected/988c8543-e70f-4434-a6ec-8b2807600569-kube-api-access-m7p9d\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.119943 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"988c8543-e70f-4434-a6ec-8b2807600569","Type":"ContainerDied","Data":"fe4b21928b58e858b3da9590ed30c27f52d0238720d7309c9a3d3dc7a7db3560"} Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.120013 4863 scope.go:117] "RemoveContainer" containerID="9858a46f6a9d4ccc200bc8f0f84044efc6b48e891e151f67f51229f593d907e9" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.120595 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.126605 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8ae2723a-9d30-47a4-8a91-c7b0c713bbd9","Type":"ContainerDied","Data":"281184e7076bfa21e3996e3baf6640f8407b40ce67fb9d64ce333d3422abbf8a"} Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.126726 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.157757 4863 scope.go:117] "RemoveContainer" containerID="c291ec8b231b1bf8479c85894d9b3d5c550bd86ae3e535a2639e6fa80c6d76fa" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.165654 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.178661 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.205558 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.209503 4863 scope.go:117] "RemoveContainer" containerID="93d81ea029817b6e05c4732b264044d60340481a3a2ed9d29aea197d9e630f16" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.214025 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.224209 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: E1205 07:11:21.224760 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="988c8543-e70f-4434-a6ec-8b2807600569" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.224785 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="988c8543-e70f-4434-a6ec-8b2807600569" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 07:11:21 crc kubenswrapper[4863]: E1205 07:11:21.224821 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-metadata" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.224829 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-metadata" Dec 05 07:11:21 crc kubenswrapper[4863]: E1205 07:11:21.224852 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-log" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.224861 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-log" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.225086 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-log" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.225105 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" containerName="nova-metadata-metadata" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.225132 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="988c8543-e70f-4434-a6ec-8b2807600569" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.225940 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.229424 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.229692 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.231242 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.236551 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.238162 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.240104 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.241132 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.243931 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.251385 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323085 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323150 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlnvh\" (UniqueName: \"kubernetes.io/projected/cbbcd3a7-48f9-4a33-982f-804c4e17e815-kube-api-access-hlnvh\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323188 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323231 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323253 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8674\" (UniqueName: \"kubernetes.io/projected/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-kube-api-access-f8674\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323269 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbbcd3a7-48f9-4a33-982f-804c4e17e815-logs\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323453 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323615 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.323722 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-config-data\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425250 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425313 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425333 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425385 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-config-data\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425417 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425462 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlnvh\" (UniqueName: \"kubernetes.io/projected/cbbcd3a7-48f9-4a33-982f-804c4e17e815-kube-api-access-hlnvh\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425497 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425522 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425548 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8674\" (UniqueName: \"kubernetes.io/projected/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-kube-api-access-f8674\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.425567 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbbcd3a7-48f9-4a33-982f-804c4e17e815-logs\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.426019 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbbcd3a7-48f9-4a33-982f-804c4e17e815-logs\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.430348 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.430699 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.431202 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.431770 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-config-data\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.435067 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.435422 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.435559 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.445080 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8674\" (UniqueName: \"kubernetes.io/projected/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-kube-api-access-f8674\") pod \"nova-cell1-novncproxy-0\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.454027 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlnvh\" (UniqueName: \"kubernetes.io/projected/cbbcd3a7-48f9-4a33-982f-804c4e17e815-kube-api-access-hlnvh\") pod \"nova-metadata-0\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.561149 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.578128 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:21 crc kubenswrapper[4863]: I1205 07:11:21.911754 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.147536 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cbbcd3a7-48f9-4a33-982f-804c4e17e815","Type":"ContainerStarted","Data":"a1032a9c51daacc762fb0c8a6abce66d5c9194bb146ec23f1ec16e2f45fb6028"} Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.167617 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.342679 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.343223 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.343515 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.343576 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.346379 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.348097 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.548094 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf4ff87b5-5kjhr"] Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.549699 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.559234 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf4ff87b5-5kjhr"] Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.618834 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ae2723a-9d30-47a4-8a91-c7b0c713bbd9" path="/var/lib/kubelet/pods/8ae2723a-9d30-47a4-8a91-c7b0c713bbd9/volumes" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.619710 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="988c8543-e70f-4434-a6ec-8b2807600569" path="/var/lib/kubelet/pods/988c8543-e70f-4434-a6ec-8b2807600569/volumes" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.653346 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-nb\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.653578 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-config\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.653731 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-sb\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.653920 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjv9m\" (UniqueName: \"kubernetes.io/projected/753801f8-f439-415b-9674-08d58e53def8-kube-api-access-zjv9m\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.655775 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-svc\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.655870 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-swift-storage-0\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.757307 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjv9m\" (UniqueName: \"kubernetes.io/projected/753801f8-f439-415b-9674-08d58e53def8-kube-api-access-zjv9m\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " 
pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.757353 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-svc\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.757422 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-swift-storage-0\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.757451 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-nb\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.757528 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-config\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.757568 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-sb\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.758562 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-sb\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.758578 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-swift-storage-0\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.758591 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-svc\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.758603 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-config\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.758675 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-nb\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.775337 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjv9m\" (UniqueName: \"kubernetes.io/projected/753801f8-f439-415b-9674-08d58e53def8-kube-api-access-zjv9m\") pod \"dnsmasq-dns-cf4ff87b5-5kjhr\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:22 crc kubenswrapper[4863]: I1205 07:11:22.908654 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:23 crc kubenswrapper[4863]: I1205 07:11:23.160172 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cbbcd3a7-48f9-4a33-982f-804c4e17e815","Type":"ContainerStarted","Data":"757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9"} Dec 05 07:11:23 crc kubenswrapper[4863]: I1205 07:11:23.160539 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cbbcd3a7-48f9-4a33-982f-804c4e17e815","Type":"ContainerStarted","Data":"5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246"} Dec 05 07:11:23 crc kubenswrapper[4863]: I1205 07:11:23.163547 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"79d93942-6ec7-4fea-9e05-a9c831ad3dd3","Type":"ContainerStarted","Data":"3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d"} Dec 05 07:11:23 crc kubenswrapper[4863]: I1205 07:11:23.163587 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"79d93942-6ec7-4fea-9e05-a9c831ad3dd3","Type":"ContainerStarted","Data":"14c9a0def12e2f96dde44e805f6c9e16de142125c7b61e9853d336ee312bd476"} Dec 05 07:11:23 crc kubenswrapper[4863]: I1205 07:11:23.191989 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.191963411 podStartE2EDuration="2.191963411s" podCreationTimestamp="2025-12-05 07:11:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:23.179791245 +0000 UTC m=+1510.905788295" watchObservedRunningTime="2025-12-05 07:11:23.191963411 +0000 UTC m=+1510.917960461" Dec 05 07:11:23 crc kubenswrapper[4863]: I1205 07:11:23.205598 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.20558271 podStartE2EDuration="2.20558271s" podCreationTimestamp="2025-12-05 07:11:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:23.204034543 +0000 UTC m=+1510.930031583" watchObservedRunningTime="2025-12-05 07:11:23.20558271 +0000 UTC m=+1510.931579750" Dec 05 07:11:23 crc kubenswrapper[4863]: I1205 07:11:23.405951 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf4ff87b5-5kjhr"] Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.173871 4863 generic.go:334] "Generic (PLEG): container finished" podID="753801f8-f439-415b-9674-08d58e53def8" 
containerID="440dcc78223ce0c66f6100e309a5a5f235b573f32391c1fa38a3b6037c634c90" exitCode=0 Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.173904 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" event={"ID":"753801f8-f439-415b-9674-08d58e53def8","Type":"ContainerDied","Data":"440dcc78223ce0c66f6100e309a5a5f235b573f32391c1fa38a3b6037c634c90"} Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.174544 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" event={"ID":"753801f8-f439-415b-9674-08d58e53def8","Type":"ContainerStarted","Data":"b4ec874fd7e56578c26a7b3813ef3559afdf5dfdefe30b335a2bb7e7c2f7939c"} Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.756210 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.756756 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-central-agent" containerID="cri-o://2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd" gracePeriod=30 Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.756784 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="proxy-httpd" containerID="cri-o://d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff" gracePeriod=30 Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.756844 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="sg-core" containerID="cri-o://3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f" gracePeriod=30 Dec 05 07:11:24 crc kubenswrapper[4863]: I1205 07:11:24.756867 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-notification-agent" containerID="cri-o://87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f" gracePeriod=30 Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.026570 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.188260 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" event={"ID":"753801f8-f439-415b-9674-08d58e53def8","Type":"ContainerStarted","Data":"415e080e8217329e1a50efac7ddcd01bbbc76c58058e18c23ff48ed51c7a70ce"} Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.198590 4863 generic.go:334] "Generic (PLEG): container finished" podID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerID="d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff" exitCode=0 Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.198625 4863 generic.go:334] "Generic (PLEG): container finished" podID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerID="3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f" exitCode=2 Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.198835 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-log" 
containerID="cri-o://2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c" gracePeriod=30 Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.198897 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerDied","Data":"d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff"} Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.198920 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerDied","Data":"3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f"} Dec 05 07:11:25 crc kubenswrapper[4863]: I1205 07:11:25.199221 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-api" containerID="cri-o://590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f" gracePeriod=30 Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.210262 4863 generic.go:334] "Generic (PLEG): container finished" podID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerID="2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd" exitCode=0 Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.210339 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerDied","Data":"2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd"} Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.212689 4863 generic.go:334] "Generic (PLEG): container finished" podID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerID="2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c" exitCode=143 Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.212764 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"495b8866-3ef7-48ae-b76e-a8313f08c48f","Type":"ContainerDied","Data":"2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c"} Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.212888 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.242910 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" podStartSLOduration=4.242892036 podStartE2EDuration="4.242892036s" podCreationTimestamp="2025-12-05 07:11:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:26.233410636 +0000 UTC m=+1513.959407676" watchObservedRunningTime="2025-12-05 07:11:26.242892036 +0000 UTC m=+1513.968889076" Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.561898 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.578235 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 07:11:26 crc kubenswrapper[4863]: I1205 07:11:26.578280 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.787448 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.862692 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-config-data\") pod \"495b8866-3ef7-48ae-b76e-a8313f08c48f\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.862788 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-combined-ca-bundle\") pod \"495b8866-3ef7-48ae-b76e-a8313f08c48f\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.862869 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq6xp\" (UniqueName: \"kubernetes.io/projected/495b8866-3ef7-48ae-b76e-a8313f08c48f-kube-api-access-nq6xp\") pod \"495b8866-3ef7-48ae-b76e-a8313f08c48f\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.862952 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/495b8866-3ef7-48ae-b76e-a8313f08c48f-logs\") pod \"495b8866-3ef7-48ae-b76e-a8313f08c48f\" (UID: \"495b8866-3ef7-48ae-b76e-a8313f08c48f\") " Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.864330 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/495b8866-3ef7-48ae-b76e-a8313f08c48f-logs" (OuterVolumeSpecName: "logs") pod "495b8866-3ef7-48ae-b76e-a8313f08c48f" (UID: "495b8866-3ef7-48ae-b76e-a8313f08c48f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.894839 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/495b8866-3ef7-48ae-b76e-a8313f08c48f-kube-api-access-nq6xp" (OuterVolumeSpecName: "kube-api-access-nq6xp") pod "495b8866-3ef7-48ae-b76e-a8313f08c48f" (UID: "495b8866-3ef7-48ae-b76e-a8313f08c48f"). InnerVolumeSpecName "kube-api-access-nq6xp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.921745 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "495b8866-3ef7-48ae-b76e-a8313f08c48f" (UID: "495b8866-3ef7-48ae-b76e-a8313f08c48f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.943717 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-config-data" (OuterVolumeSpecName: "config-data") pod "495b8866-3ef7-48ae-b76e-a8313f08c48f" (UID: "495b8866-3ef7-48ae-b76e-a8313f08c48f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.966166 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.966217 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq6xp\" (UniqueName: \"kubernetes.io/projected/495b8866-3ef7-48ae-b76e-a8313f08c48f-kube-api-access-nq6xp\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.966235 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/495b8866-3ef7-48ae-b76e-a8313f08c48f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:28 crc kubenswrapper[4863]: I1205 07:11:28.966246 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495b8866-3ef7-48ae-b76e-a8313f08c48f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.015778 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.066802 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-sg-core-conf-yaml\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.066907 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-ceilometer-tls-certs\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.066957 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-config-data\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.066973 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5zz4\" (UniqueName: \"kubernetes.io/projected/24322583-3a12-45d0-8c7d-ae8b6bfca154-kube-api-access-h5zz4\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.066994 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-scripts\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.067037 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-run-httpd\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.067136 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-log-httpd\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.067173 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-combined-ca-bundle\") pod \"24322583-3a12-45d0-8c7d-ae8b6bfca154\" (UID: \"24322583-3a12-45d0-8c7d-ae8b6bfca154\") " Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.070526 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.072017 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.072082 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-scripts" (OuterVolumeSpecName: "scripts") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.073894 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24322583-3a12-45d0-8c7d-ae8b6bfca154-kube-api-access-h5zz4" (OuterVolumeSpecName: "kube-api-access-h5zz4") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). InnerVolumeSpecName "kube-api-access-h5zz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.105637 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.144656 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.166688 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.178887 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.178968 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.178987 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.179008 4863 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.179021 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5zz4\" (UniqueName: \"kubernetes.io/projected/24322583-3a12-45d0-8c7d-ae8b6bfca154-kube-api-access-h5zz4\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.179033 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.179049 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/24322583-3a12-45d0-8c7d-ae8b6bfca154-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.236238 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-config-data" (OuterVolumeSpecName: "config-data") pod "24322583-3a12-45d0-8c7d-ae8b6bfca154" (UID: "24322583-3a12-45d0-8c7d-ae8b6bfca154"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.255399 4863 generic.go:334] "Generic (PLEG): container finished" podID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerID="590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f" exitCode=0 Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.255459 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"495b8866-3ef7-48ae-b76e-a8313f08c48f","Type":"ContainerDied","Data":"590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f"} Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.255505 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"495b8866-3ef7-48ae-b76e-a8313f08c48f","Type":"ContainerDied","Data":"438b783c92200316261e417f5c4149eb3edc6c16de8a46ab86d5bb6fff48abcf"} Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.255526 4863 scope.go:117] "RemoveContainer" containerID="590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.255661 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.261261 4863 generic.go:334] "Generic (PLEG): container finished" podID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerID="87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f" exitCode=0 Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.261306 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.261311 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerDied","Data":"87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f"} Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.261342 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"24322583-3a12-45d0-8c7d-ae8b6bfca154","Type":"ContainerDied","Data":"48012543c989b81aae96ba0a4b012fd6a63722d9fb26b7e635fc468189cc7650"} Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.281073 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24322583-3a12-45d0-8c7d-ae8b6bfca154-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.283155 4863 scope.go:117] "RemoveContainer" containerID="2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.307371 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.319596 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.324181 4863 scope.go:117] "RemoveContainer" containerID="590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.326743 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f\": container with ID starting with 590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f not found: ID does not exist" containerID="590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.326793 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f"} err="failed to get container status \"590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f\": rpc error: code = NotFound desc = could not find container \"590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f\": container with ID starting with 590fb34d9a6b02f32052fce0c60b232ec44b8c0ba953a5e080df4ad26826ec6f not found: ID does not exist" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.326820 4863 scope.go:117] "RemoveContainer" containerID="2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.327192 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c\": container with ID starting with 
2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c not found: ID does not exist" containerID="2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.327245 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c"} err="failed to get container status \"2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c\": rpc error: code = NotFound desc = could not find container \"2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c\": container with ID starting with 2660012c9004b51bd519460559dd9b5fabbb67c0c49448df7552f7a231dba03c not found: ID does not exist" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.327277 4863 scope.go:117] "RemoveContainer" containerID="d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.338024 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.348937 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.366319 4863 scope.go:117] "RemoveContainer" containerID="3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.376103 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.377016 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-log" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.377118 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-log" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.377201 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-api" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.377264 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-api" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.377340 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="sg-core" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.377391 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="sg-core" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.377452 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="proxy-httpd" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.377530 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="proxy-httpd" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.377601 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-central-agent" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.377653 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-central-agent" Dec 05 07:11:29 crc 
kubenswrapper[4863]: E1205 07:11:29.377709 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-notification-agent" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.377883 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-notification-agent" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.378137 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="sg-core" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.378432 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-api" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.378546 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-central-agent" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.378671 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" containerName="nova-api-log" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.378763 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="proxy-httpd" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.378831 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" containerName="ceilometer-notification-agent" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.379960 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.384053 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.384364 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.384946 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.400539 4863 scope.go:117] "RemoveContainer" containerID="87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.401029 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.416078 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.419124 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.427556 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.431751 4863 scope.go:117] "RemoveContainer" containerID="2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.432177 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.432584 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.433207 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485298 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs5tm\" (UniqueName: \"kubernetes.io/projected/9458bac9-62a9-45a1-8554-e06e68b71993-kube-api-access-qs5tm\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485333 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485369 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458bac9-62a9-45a1-8554-e06e68b71993-logs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485405 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485419 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485485 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-run-httpd\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485503 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxr99\" (UniqueName: \"kubernetes.io/projected/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-kube-api-access-wxr99\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: 
I1205 07:11:29.485537 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-config-data\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485551 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-public-tls-certs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485569 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485591 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-scripts\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485606 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-config-data\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485638 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.485656 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-log-httpd\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.524057 4863 scope.go:117] "RemoveContainer" containerID="d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.524741 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff\": container with ID starting with d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff not found: ID does not exist" containerID="d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.524873 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff"} err="failed to get container status \"d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff\": rpc error: code = 
NotFound desc = could not find container \"d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff\": container with ID starting with d8525d874b403de11b44efba7276b2b60bb8f934a858230240d13be24bb50bff not found: ID does not exist" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.525000 4863 scope.go:117] "RemoveContainer" containerID="3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.525629 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f\": container with ID starting with 3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f not found: ID does not exist" containerID="3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.525674 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f"} err="failed to get container status \"3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f\": rpc error: code = NotFound desc = could not find container \"3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f\": container with ID starting with 3d3fb70c8e898741739ee66a0687ebe117f62ffeeed490dde0e72b1c4b00871f not found: ID does not exist" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.525700 4863 scope.go:117] "RemoveContainer" containerID="87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.526135 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f\": container with ID starting with 87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f not found: ID does not exist" containerID="87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.526254 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f"} err="failed to get container status \"87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f\": rpc error: code = NotFound desc = could not find container \"87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f\": container with ID starting with 87ac51ae343fe6ea0db0b0d15ab588652c7143641333d4cf209a422ef922e50f not found: ID does not exist" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.526804 4863 scope.go:117] "RemoveContainer" containerID="2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd" Dec 05 07:11:29 crc kubenswrapper[4863]: E1205 07:11:29.527187 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd\": container with ID starting with 2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd not found: ID does not exist" containerID="2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.527221 4863 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd"} err="failed to get container status \"2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd\": rpc error: code = NotFound desc = could not find container \"2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd\": container with ID starting with 2272832e27e87ab0643ee094873f95c9238344b921c8cb3f422bfca6c703fabd not found: ID does not exist" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587291 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587358 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-log-httpd\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587419 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs5tm\" (UniqueName: \"kubernetes.io/projected/9458bac9-62a9-45a1-8554-e06e68b71993-kube-api-access-qs5tm\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587544 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587646 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458bac9-62a9-45a1-8554-e06e68b71993-logs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587723 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587738 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587855 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-run-httpd\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587898 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxr99\" (UniqueName: 
\"kubernetes.io/projected/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-kube-api-access-wxr99\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587975 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-config-data\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.587997 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-public-tls-certs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.588013 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.588048 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-scripts\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.588066 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-config-data\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.588572 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-log-httpd\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.588634 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458bac9-62a9-45a1-8554-e06e68b71993-logs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.588809 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-run-httpd\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.592207 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-config-data\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.594322 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-config-data\") pod \"ceilometer-0\" (UID: 
\"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.596654 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.597969 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.598367 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.600530 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-scripts\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.600870 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.601561 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-public-tls-certs\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.605321 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.605634 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs5tm\" (UniqueName: \"kubernetes.io/projected/9458bac9-62a9-45a1-8554-e06e68b71993-kube-api-access-qs5tm\") pod \"nova-api-0\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.608611 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxr99\" (UniqueName: \"kubernetes.io/projected/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-kube-api-access-wxr99\") pod \"ceilometer-0\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " pod="openstack/ceilometer-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.708423 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:29 crc kubenswrapper[4863]: I1205 07:11:29.809554 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:11:30 crc kubenswrapper[4863]: I1205 07:11:30.199142 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:30 crc kubenswrapper[4863]: I1205 07:11:30.291217 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9458bac9-62a9-45a1-8554-e06e68b71993","Type":"ContainerStarted","Data":"efbb27a9b3be9b61fb8bf7c150a2168a7ad4ee4750a1570680eb08cab42dde17"} Dec 05 07:11:30 crc kubenswrapper[4863]: I1205 07:11:30.417933 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:11:30 crc kubenswrapper[4863]: W1205 07:11:30.422389 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ebeed61_b530_43f8_bb15_5e42fa95f1b9.slice/crio-f7447b3f181ff6f96724223418dfac1b61ec5efa59e6634b16ec966894960cf0 WatchSource:0}: Error finding container f7447b3f181ff6f96724223418dfac1b61ec5efa59e6634b16ec966894960cf0: Status 404 returned error can't find the container with id f7447b3f181ff6f96724223418dfac1b61ec5efa59e6634b16ec966894960cf0 Dec 05 07:11:30 crc kubenswrapper[4863]: I1205 07:11:30.612632 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24322583-3a12-45d0-8c7d-ae8b6bfca154" path="/var/lib/kubelet/pods/24322583-3a12-45d0-8c7d-ae8b6bfca154/volumes" Dec 05 07:11:30 crc kubenswrapper[4863]: I1205 07:11:30.613369 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="495b8866-3ef7-48ae-b76e-a8313f08c48f" path="/var/lib/kubelet/pods/495b8866-3ef7-48ae-b76e-a8313f08c48f/volumes" Dec 05 07:11:31 crc kubenswrapper[4863]: I1205 07:11:31.315056 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerStarted","Data":"f7447b3f181ff6f96724223418dfac1b61ec5efa59e6634b16ec966894960cf0"} Dec 05 07:11:31 crc kubenswrapper[4863]: I1205 07:11:31.562253 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:31 crc kubenswrapper[4863]: I1205 07:11:31.578325 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 07:11:31 crc kubenswrapper[4863]: I1205 07:11:31.578391 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 07:11:31 crc kubenswrapper[4863]: I1205 07:11:31.582732 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.340032 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.490440 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-tzwm5"] Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.495136 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.498006 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.498192 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.509534 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-tzwm5"] Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.545499 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdg6k\" (UniqueName: \"kubernetes.io/projected/9817e0ed-1998-43c8-a8e7-b9f94d58d433-kube-api-access-gdg6k\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.545604 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.545716 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-config-data\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.545805 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-scripts\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.590746 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.591313 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.648197 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-scripts\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.648277 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdg6k\" (UniqueName: 
\"kubernetes.io/projected/9817e0ed-1998-43c8-a8e7-b9f94d58d433-kube-api-access-gdg6k\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.648366 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.648589 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-config-data\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.654416 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.654549 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-scripts\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.657990 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-config-data\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.672914 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdg6k\" (UniqueName: \"kubernetes.io/projected/9817e0ed-1998-43c8-a8e7-b9f94d58d433-kube-api-access-gdg6k\") pod \"nova-cell1-cell-mapping-tzwm5\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.821575 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:32 crc kubenswrapper[4863]: I1205 07:11:32.911638 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.001568 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d75688ddc-hlnlx"] Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.003804 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" podUID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerName="dnsmasq-dns" containerID="cri-o://13c4e5fca364a92d11aea5e56ec5d1a0673050f112e7fb33ec84c4107f657efc" gracePeriod=10 Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.338308 4863 generic.go:334] "Generic (PLEG): container finished" podID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerID="13c4e5fca364a92d11aea5e56ec5d1a0673050f112e7fb33ec84c4107f657efc" exitCode=0 Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.338327 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" event={"ID":"685d51e1-55c0-4334-9e5c-fae1485c49ce","Type":"ContainerDied","Data":"13c4e5fca364a92d11aea5e56ec5d1a0673050f112e7fb33ec84c4107f657efc"} Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.341218 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerStarted","Data":"faa93203b4e74ae18536fcc22dae3d5e667b4f4cb6230251e992eb4cbb666258"} Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.344153 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9458bac9-62a9-45a1-8554-e06e68b71993","Type":"ContainerStarted","Data":"6c2aa8e2e74c298f68a431ba05d49309fee9ad00f8acd7d8c61f3238e48cfe23"} Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.344197 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-tzwm5"] Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.344214 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9458bac9-62a9-45a1-8554-e06e68b71993","Type":"ContainerStarted","Data":"47b48a389edbdc55351befcd565200840fae347bc65022071d6439e0fc3262ee"} Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.371995 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.371970351 podStartE2EDuration="4.371970351s" podCreationTimestamp="2025-12-05 07:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:33.371164272 +0000 UTC m=+1521.097161312" watchObservedRunningTime="2025-12-05 07:11:33.371970351 +0000 UTC m=+1521.097967391" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.526449 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.698543 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-svc\") pod \"685d51e1-55c0-4334-9e5c-fae1485c49ce\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.698876 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5b4k\" (UniqueName: \"kubernetes.io/projected/685d51e1-55c0-4334-9e5c-fae1485c49ce-kube-api-access-l5b4k\") pod \"685d51e1-55c0-4334-9e5c-fae1485c49ce\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.698928 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-swift-storage-0\") pod \"685d51e1-55c0-4334-9e5c-fae1485c49ce\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.699003 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-sb\") pod \"685d51e1-55c0-4334-9e5c-fae1485c49ce\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.699051 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-nb\") pod \"685d51e1-55c0-4334-9e5c-fae1485c49ce\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.699186 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-config\") pod \"685d51e1-55c0-4334-9e5c-fae1485c49ce\" (UID: \"685d51e1-55c0-4334-9e5c-fae1485c49ce\") " Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.713614 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/685d51e1-55c0-4334-9e5c-fae1485c49ce-kube-api-access-l5b4k" (OuterVolumeSpecName: "kube-api-access-l5b4k") pod "685d51e1-55c0-4334-9e5c-fae1485c49ce" (UID: "685d51e1-55c0-4334-9e5c-fae1485c49ce"). InnerVolumeSpecName "kube-api-access-l5b4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.780205 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-config" (OuterVolumeSpecName: "config") pod "685d51e1-55c0-4334-9e5c-fae1485c49ce" (UID: "685d51e1-55c0-4334-9e5c-fae1485c49ce"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.789635 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "685d51e1-55c0-4334-9e5c-fae1485c49ce" (UID: "685d51e1-55c0-4334-9e5c-fae1485c49ce"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.794107 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "685d51e1-55c0-4334-9e5c-fae1485c49ce" (UID: "685d51e1-55c0-4334-9e5c-fae1485c49ce"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.798905 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "685d51e1-55c0-4334-9e5c-fae1485c49ce" (UID: "685d51e1-55c0-4334-9e5c-fae1485c49ce"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.804851 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.804892 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.804904 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.804917 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5b4k\" (UniqueName: \"kubernetes.io/projected/685d51e1-55c0-4334-9e5c-fae1485c49ce-kube-api-access-l5b4k\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.804929 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.805920 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "685d51e1-55c0-4334-9e5c-fae1485c49ce" (UID: "685d51e1-55c0-4334-9e5c-fae1485c49ce"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:11:33 crc kubenswrapper[4863]: I1205 07:11:33.906912 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/685d51e1-55c0-4334-9e5c-fae1485c49ce-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.376171 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tzwm5" event={"ID":"9817e0ed-1998-43c8-a8e7-b9f94d58d433","Type":"ContainerStarted","Data":"00bcf26dc0b9990e06d0e33384666ee00322bb913ab4306109d274baea0f46d4"} Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.376581 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tzwm5" event={"ID":"9817e0ed-1998-43c8-a8e7-b9f94d58d433","Type":"ContainerStarted","Data":"a23c096383ec8df2cdd31e7f91ad315a0920a785b98e2a6f58b3770d41503460"} Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.387561 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" event={"ID":"685d51e1-55c0-4334-9e5c-fae1485c49ce","Type":"ContainerDied","Data":"f071127da3ed5d79f23b17bea16e0169a8a45c2dbbea91ce8c5662df38861fd1"} Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.387627 4863 scope.go:117] "RemoveContainer" containerID="13c4e5fca364a92d11aea5e56ec5d1a0673050f112e7fb33ec84c4107f657efc" Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.387847 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d75688ddc-hlnlx" Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.403282 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerStarted","Data":"8a42fab69785176997b7e2fb38ed78927e9b906b9e9eb6e7ad136bfb756c4118"} Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.427616 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-tzwm5" podStartSLOduration=2.427592413 podStartE2EDuration="2.427592413s" podCreationTimestamp="2025-12-05 07:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:34.396767656 +0000 UTC m=+1522.122764706" watchObservedRunningTime="2025-12-05 07:11:34.427592413 +0000 UTC m=+1522.153589453" Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.439062 4863 scope.go:117] "RemoveContainer" containerID="63f913ec80a2272783d213863d6c62095fe7c1e692569e26a845c7d932a6bd4d" Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.445960 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d75688ddc-hlnlx"] Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.473733 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d75688ddc-hlnlx"] Dec 05 07:11:34 crc kubenswrapper[4863]: I1205 07:11:34.612853 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="685d51e1-55c0-4334-9e5c-fae1485c49ce" path="/var/lib/kubelet/pods/685d51e1-55c0-4334-9e5c-fae1485c49ce/volumes" Dec 05 07:11:36 crc kubenswrapper[4863]: I1205 07:11:36.436200 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerStarted","Data":"317ef2eee14e624241ec945a7fdc4c1afe943f1965f78feb5dc586e82047769c"} Dec 
05 07:11:38 crc kubenswrapper[4863]: I1205 07:11:38.464358 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:11:38 crc kubenswrapper[4863]: I1205 07:11:38.464947 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:11:38 crc kubenswrapper[4863]: I1205 07:11:38.466202 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerStarted","Data":"7550e24c5734f210c7db5088ea7f7de89cf17da52278a4790f7d7af94780b9ee"} Dec 05 07:11:38 crc kubenswrapper[4863]: I1205 07:11:38.467188 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 07:11:38 crc kubenswrapper[4863]: I1205 07:11:38.500061 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.449233348 podStartE2EDuration="9.500044255s" podCreationTimestamp="2025-12-05 07:11:29 +0000 UTC" firstStartedPulling="2025-12-05 07:11:30.424528965 +0000 UTC m=+1518.150526005" lastFinishedPulling="2025-12-05 07:11:37.475339872 +0000 UTC m=+1525.201336912" observedRunningTime="2025-12-05 07:11:38.493613408 +0000 UTC m=+1526.219610469" watchObservedRunningTime="2025-12-05 07:11:38.500044255 +0000 UTC m=+1526.226041295" Dec 05 07:11:39 crc kubenswrapper[4863]: I1205 07:11:39.479900 4863 generic.go:334] "Generic (PLEG): container finished" podID="9817e0ed-1998-43c8-a8e7-b9f94d58d433" containerID="00bcf26dc0b9990e06d0e33384666ee00322bb913ab4306109d274baea0f46d4" exitCode=0 Dec 05 07:11:39 crc kubenswrapper[4863]: I1205 07:11:39.480110 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tzwm5" event={"ID":"9817e0ed-1998-43c8-a8e7-b9f94d58d433","Type":"ContainerDied","Data":"00bcf26dc0b9990e06d0e33384666ee00322bb913ab4306109d274baea0f46d4"} Dec 05 07:11:39 crc kubenswrapper[4863]: I1205 07:11:39.709006 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:11:39 crc kubenswrapper[4863]: I1205 07:11:39.709055 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:11:40 crc kubenswrapper[4863]: I1205 07:11:40.723730 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:40 crc kubenswrapper[4863]: I1205 07:11:40.724228 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:40 crc kubenswrapper[4863]: I1205 07:11:40.928313 4863 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:40 crc kubenswrapper[4863]: I1205 07:11:40.996837 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-scripts\") pod \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " Dec 05 07:11:40 crc kubenswrapper[4863]: I1205 07:11:40.996942 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-config-data\") pod \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " Dec 05 07:11:40 crc kubenswrapper[4863]: I1205 07:11:40.997195 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-combined-ca-bundle\") pod \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " Dec 05 07:11:40 crc kubenswrapper[4863]: I1205 07:11:40.997249 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdg6k\" (UniqueName: \"kubernetes.io/projected/9817e0ed-1998-43c8-a8e7-b9f94d58d433-kube-api-access-gdg6k\") pod \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\" (UID: \"9817e0ed-1998-43c8-a8e7-b9f94d58d433\") " Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.004621 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9817e0ed-1998-43c8-a8e7-b9f94d58d433-kube-api-access-gdg6k" (OuterVolumeSpecName: "kube-api-access-gdg6k") pod "9817e0ed-1998-43c8-a8e7-b9f94d58d433" (UID: "9817e0ed-1998-43c8-a8e7-b9f94d58d433"). InnerVolumeSpecName "kube-api-access-gdg6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.008653 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-scripts" (OuterVolumeSpecName: "scripts") pod "9817e0ed-1998-43c8-a8e7-b9f94d58d433" (UID: "9817e0ed-1998-43c8-a8e7-b9f94d58d433"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.040034 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-config-data" (OuterVolumeSpecName: "config-data") pod "9817e0ed-1998-43c8-a8e7-b9f94d58d433" (UID: "9817e0ed-1998-43c8-a8e7-b9f94d58d433"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.042689 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9817e0ed-1998-43c8-a8e7-b9f94d58d433" (UID: "9817e0ed-1998-43c8-a8e7-b9f94d58d433"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.100344 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdg6k\" (UniqueName: \"kubernetes.io/projected/9817e0ed-1998-43c8-a8e7-b9f94d58d433-kube-api-access-gdg6k\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.100386 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.100398 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.100409 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9817e0ed-1998-43c8-a8e7-b9f94d58d433-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.505844 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tzwm5" event={"ID":"9817e0ed-1998-43c8-a8e7-b9f94d58d433","Type":"ContainerDied","Data":"a23c096383ec8df2cdd31e7f91ad315a0920a785b98e2a6f58b3770d41503460"} Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.505900 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a23c096383ec8df2cdd31e7f91ad315a0920a785b98e2a6f58b3770d41503460" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.505976 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tzwm5" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.590843 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.593071 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.597867 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.716326 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.716609 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e322902d-6cc2-4ff1-a549-08380d7e6010" containerName="nova-scheduler-scheduler" containerID="cri-o://a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9" gracePeriod=30 Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.747849 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.748515 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-log" containerID="cri-o://47b48a389edbdc55351befcd565200840fae347bc65022071d6439e0fc3262ee" gracePeriod=30 Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.749412 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-api" containerID="cri-o://6c2aa8e2e74c298f68a431ba05d49309fee9ad00f8acd7d8c61f3238e48cfe23" gracePeriod=30 Dec 05 07:11:41 crc kubenswrapper[4863]: I1205 07:11:41.769995 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:42 crc kubenswrapper[4863]: E1205 07:11:42.364091 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:11:42 crc kubenswrapper[4863]: E1205 07:11:42.366955 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:11:42 crc kubenswrapper[4863]: E1205 07:11:42.368848 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:11:42 crc kubenswrapper[4863]: E1205 07:11:42.368903 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="e322902d-6cc2-4ff1-a549-08380d7e6010" containerName="nova-scheduler-scheduler" Dec 05 07:11:42 crc kubenswrapper[4863]: I1205 07:11:42.518365 4863 generic.go:334] "Generic (PLEG): container finished" podID="9458bac9-62a9-45a1-8554-e06e68b71993" containerID="47b48a389edbdc55351befcd565200840fae347bc65022071d6439e0fc3262ee" exitCode=143 Dec 05 07:11:42 crc kubenswrapper[4863]: I1205 07:11:42.519636 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9458bac9-62a9-45a1-8554-e06e68b71993","Type":"ContainerDied","Data":"47b48a389edbdc55351befcd565200840fae347bc65022071d6439e0fc3262ee"} Dec 05 07:11:42 crc kubenswrapper[4863]: I1205 07:11:42.529654 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 07:11:43 crc kubenswrapper[4863]: I1205 07:11:43.526421 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-log" containerID="cri-o://757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9" gracePeriod=30 Dec 05 07:11:43 crc kubenswrapper[4863]: I1205 07:11:43.526514 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-metadata" containerID="cri-o://5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246" gracePeriod=30 Dec 05 07:11:44 crc kubenswrapper[4863]: I1205 07:11:44.539290 4863 generic.go:334] "Generic (PLEG): container finished" podID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" 
containerID="757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9" exitCode=143 Dec 05 07:11:44 crc kubenswrapper[4863]: I1205 07:11:44.539335 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cbbcd3a7-48f9-4a33-982f-804c4e17e815","Type":"ContainerDied","Data":"757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9"} Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.570374 4863 generic.go:334] "Generic (PLEG): container finished" podID="9458bac9-62a9-45a1-8554-e06e68b71993" containerID="6c2aa8e2e74c298f68a431ba05d49309fee9ad00f8acd7d8c61f3238e48cfe23" exitCode=0 Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.570657 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9458bac9-62a9-45a1-8554-e06e68b71993","Type":"ContainerDied","Data":"6c2aa8e2e74c298f68a431ba05d49309fee9ad00f8acd7d8c61f3238e48cfe23"} Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.572690 4863 generic.go:334] "Generic (PLEG): container finished" podID="e322902d-6cc2-4ff1-a549-08380d7e6010" containerID="a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9" exitCode=0 Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.572728 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e322902d-6cc2-4ff1-a549-08380d7e6010","Type":"ContainerDied","Data":"a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9"} Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.572755 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e322902d-6cc2-4ff1-a549-08380d7e6010","Type":"ContainerDied","Data":"3fefa43640ed7c5f349aae84b5649fd9229c01fe93dd9b10b229635d0df55c35"} Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.572767 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fefa43640ed7c5f349aae84b5649fd9229c01fe93dd9b10b229635d0df55c35" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.573099 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.635938 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-config-data\") pod \"e322902d-6cc2-4ff1-a549-08380d7e6010\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.636013 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4ffl\" (UniqueName: \"kubernetes.io/projected/e322902d-6cc2-4ff1-a549-08380d7e6010-kube-api-access-j4ffl\") pod \"e322902d-6cc2-4ff1-a549-08380d7e6010\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.636050 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-combined-ca-bundle\") pod \"e322902d-6cc2-4ff1-a549-08380d7e6010\" (UID: \"e322902d-6cc2-4ff1-a549-08380d7e6010\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.650758 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e322902d-6cc2-4ff1-a549-08380d7e6010-kube-api-access-j4ffl" (OuterVolumeSpecName: "kube-api-access-j4ffl") pod "e322902d-6cc2-4ff1-a549-08380d7e6010" (UID: "e322902d-6cc2-4ff1-a549-08380d7e6010"). InnerVolumeSpecName "kube-api-access-j4ffl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.670368 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-config-data" (OuterVolumeSpecName: "config-data") pod "e322902d-6cc2-4ff1-a549-08380d7e6010" (UID: "e322902d-6cc2-4ff1-a549-08380d7e6010"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.689711 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e322902d-6cc2-4ff1-a549-08380d7e6010" (UID: "e322902d-6cc2-4ff1-a549-08380d7e6010"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.704326 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.737549 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-internal-tls-certs\") pod \"9458bac9-62a9-45a1-8554-e06e68b71993\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.737683 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs5tm\" (UniqueName: \"kubernetes.io/projected/9458bac9-62a9-45a1-8554-e06e68b71993-kube-api-access-qs5tm\") pod \"9458bac9-62a9-45a1-8554-e06e68b71993\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.737705 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-combined-ca-bundle\") pod \"9458bac9-62a9-45a1-8554-e06e68b71993\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.737799 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458bac9-62a9-45a1-8554-e06e68b71993-logs\") pod \"9458bac9-62a9-45a1-8554-e06e68b71993\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.737845 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-config-data\") pod \"9458bac9-62a9-45a1-8554-e06e68b71993\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.737906 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-public-tls-certs\") pod \"9458bac9-62a9-45a1-8554-e06e68b71993\" (UID: \"9458bac9-62a9-45a1-8554-e06e68b71993\") " Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.738295 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.738312 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4ffl\" (UniqueName: \"kubernetes.io/projected/e322902d-6cc2-4ff1-a549-08380d7e6010-kube-api-access-j4ffl\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.738324 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e322902d-6cc2-4ff1-a549-08380d7e6010-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.738433 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9458bac9-62a9-45a1-8554-e06e68b71993-logs" (OuterVolumeSpecName: "logs") pod "9458bac9-62a9-45a1-8554-e06e68b71993" (UID: "9458bac9-62a9-45a1-8554-e06e68b71993"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.741885 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9458bac9-62a9-45a1-8554-e06e68b71993-kube-api-access-qs5tm" (OuterVolumeSpecName: "kube-api-access-qs5tm") pod "9458bac9-62a9-45a1-8554-e06e68b71993" (UID: "9458bac9-62a9-45a1-8554-e06e68b71993"). InnerVolumeSpecName "kube-api-access-qs5tm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.747243 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:43076->10.217.0.196:8775: read: connection reset by peer" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.747618 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:43062->10.217.0.196:8775: read: connection reset by peer" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.784245 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9458bac9-62a9-45a1-8554-e06e68b71993" (UID: "9458bac9-62a9-45a1-8554-e06e68b71993"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.803213 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-config-data" (OuterVolumeSpecName: "config-data") pod "9458bac9-62a9-45a1-8554-e06e68b71993" (UID: "9458bac9-62a9-45a1-8554-e06e68b71993"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.808277 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9458bac9-62a9-45a1-8554-e06e68b71993" (UID: "9458bac9-62a9-45a1-8554-e06e68b71993"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.820596 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9458bac9-62a9-45a1-8554-e06e68b71993" (UID: "9458bac9-62a9-45a1-8554-e06e68b71993"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.840099 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9458bac9-62a9-45a1-8554-e06e68b71993-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.840137 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.840149 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.840162 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.840171 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs5tm\" (UniqueName: \"kubernetes.io/projected/9458bac9-62a9-45a1-8554-e06e68b71993-kube-api-access-qs5tm\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:46 crc kubenswrapper[4863]: I1205 07:11:46.840181 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9458bac9-62a9-45a1-8554-e06e68b71993-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.347030 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.452195 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-nova-metadata-tls-certs\") pod \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.452664 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-combined-ca-bundle\") pod \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.452710 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-config-data\") pod \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.452792 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlnvh\" (UniqueName: \"kubernetes.io/projected/cbbcd3a7-48f9-4a33-982f-804c4e17e815-kube-api-access-hlnvh\") pod \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.452826 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbbcd3a7-48f9-4a33-982f-804c4e17e815-logs\") pod 
\"cbbcd3a7-48f9-4a33-982f-804c4e17e815\" (UID: \"cbbcd3a7-48f9-4a33-982f-804c4e17e815\") " Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.453617 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbbcd3a7-48f9-4a33-982f-804c4e17e815-logs" (OuterVolumeSpecName: "logs") pod "cbbcd3a7-48f9-4a33-982f-804c4e17e815" (UID: "cbbcd3a7-48f9-4a33-982f-804c4e17e815"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.454140 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbbcd3a7-48f9-4a33-982f-804c4e17e815-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.458129 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbbcd3a7-48f9-4a33-982f-804c4e17e815-kube-api-access-hlnvh" (OuterVolumeSpecName: "kube-api-access-hlnvh") pod "cbbcd3a7-48f9-4a33-982f-804c4e17e815" (UID: "cbbcd3a7-48f9-4a33-982f-804c4e17e815"). InnerVolumeSpecName "kube-api-access-hlnvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.496944 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-config-data" (OuterVolumeSpecName: "config-data") pod "cbbcd3a7-48f9-4a33-982f-804c4e17e815" (UID: "cbbcd3a7-48f9-4a33-982f-804c4e17e815"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.494752 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cbbcd3a7-48f9-4a33-982f-804c4e17e815" (UID: "cbbcd3a7-48f9-4a33-982f-804c4e17e815"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.522750 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "cbbcd3a7-48f9-4a33-982f-804c4e17e815" (UID: "cbbcd3a7-48f9-4a33-982f-804c4e17e815"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.555749 4863 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.555782 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.555791 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbbcd3a7-48f9-4a33-982f-804c4e17e815-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.555799 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlnvh\" (UniqueName: \"kubernetes.io/projected/cbbcd3a7-48f9-4a33-982f-804c4e17e815-kube-api-access-hlnvh\") on node \"crc\" DevicePath \"\"" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.583167 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9458bac9-62a9-45a1-8554-e06e68b71993","Type":"ContainerDied","Data":"efbb27a9b3be9b61fb8bf7c150a2168a7ad4ee4750a1570680eb08cab42dde17"} Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.583219 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.583233 4863 scope.go:117] "RemoveContainer" containerID="6c2aa8e2e74c298f68a431ba05d49309fee9ad00f8acd7d8c61f3238e48cfe23" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.586024 4863 generic.go:334] "Generic (PLEG): container finished" podID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerID="5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246" exitCode=0 Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.586109 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.595235 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cbbcd3a7-48f9-4a33-982f-804c4e17e815","Type":"ContainerDied","Data":"5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246"} Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.595285 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.595291 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cbbcd3a7-48f9-4a33-982f-804c4e17e815","Type":"ContainerDied","Data":"a1032a9c51daacc762fb0c8a6abce66d5c9194bb146ec23f1ec16e2f45fb6028"} Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.610847 4863 scope.go:117] "RemoveContainer" containerID="47b48a389edbdc55351befcd565200840fae347bc65022071d6439e0fc3262ee" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.648532 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.667031 4863 scope.go:117] "RemoveContainer" containerID="5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.667047 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.705299 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.705836 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerName="dnsmasq-dns" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.705857 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerName="dnsmasq-dns" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.705874 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-api" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.705881 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-api" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.705896 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-log" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.705903 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-log" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.705918 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerName="init" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.705926 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerName="init" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.705941 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e322902d-6cc2-4ff1-a549-08380d7e6010" containerName="nova-scheduler-scheduler" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.705947 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e322902d-6cc2-4ff1-a549-08380d7e6010" containerName="nova-scheduler-scheduler" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.705966 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-metadata" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.705973 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-metadata" Dec 
05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.705996 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-log" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706003 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-log" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.706013 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9817e0ed-1998-43c8-a8e7-b9f94d58d433" containerName="nova-manage" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706020 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9817e0ed-1998-43c8-a8e7-b9f94d58d433" containerName="nova-manage" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706218 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-api" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706242 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="685d51e1-55c0-4334-9e5c-fae1485c49ce" containerName="dnsmasq-dns" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706254 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e322902d-6cc2-4ff1-a549-08380d7e6010" containerName="nova-scheduler-scheduler" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706270 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9817e0ed-1998-43c8-a8e7-b9f94d58d433" containerName="nova-manage" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706286 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-log" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706308 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" containerName="nova-metadata-metadata" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.706321 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" containerName="nova-api-log" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.707114 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.723158 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.737567 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.752068 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.765505 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-876mg\" (UniqueName: \"kubernetes.io/projected/d847c56f-38ef-4aaf-a974-b347f5091038-kube-api-access-876mg\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.765753 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-config-data\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.765965 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.768972 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.783538 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.800527 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.802149 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.805725 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.806659 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.809606 4863 scope.go:117] "RemoveContainer" containerID="757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.812214 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.831857 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.838831 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.840776 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.847820 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.847912 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.848200 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.857778 4863 scope.go:117] "RemoveContainer" containerID="5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.858740 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246\": container with ID starting with 5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246 not found: ID does not exist" containerID="5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.858770 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246"} err="failed to get container status \"5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246\": rpc error: code = NotFound desc = could not find container \"5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246\": container with ID starting with 5f36aa77679a198d21d99ce20dead45892de8d4a0b7b5f4d71a25d17f0528246 not found: ID does not exist" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.858812 4863 scope.go:117] "RemoveContainer" containerID="757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9" Dec 05 07:11:47 crc kubenswrapper[4863]: E1205 07:11:47.859846 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9\": container with ID starting with 757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9 not found: ID does not exist" containerID="757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.859907 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9"} err="failed to get container status \"757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9\": rpc error: code = NotFound desc = could not find container \"757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9\": container with ID starting with 757c22ec4ea7d397b7a0c1480b8fde228e3c7b081b518a96d0644534d67c18b9 not found: ID does not exist" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.864416 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.867563 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-config-data\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc 
kubenswrapper[4863]: I1205 07:11:47.867646 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-public-tls-certs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.867711 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.867741 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3409451f-c36d-4577-8720-89f4b6dd5ec5-logs\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.867812 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.867841 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-logs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.867956 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-876mg\" (UniqueName: \"kubernetes.io/projected/d847c56f-38ef-4aaf-a974-b347f5091038-kube-api-access-876mg\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.867994 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c84h\" (UniqueName: \"kubernetes.io/projected/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-kube-api-access-2c84h\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.868023 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.868053 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.868335 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-config-data\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.868396 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t49s\" (UniqueName: \"kubernetes.io/projected/3409451f-c36d-4577-8720-89f4b6dd5ec5-kube-api-access-5t49s\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.868446 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.868517 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-config-data\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.871566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.871962 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-config-data\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.884450 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-876mg\" (UniqueName: \"kubernetes.io/projected/d847c56f-38ef-4aaf-a974-b347f5091038-kube-api-access-876mg\") pod \"nova-scheduler-0\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " pod="openstack/nova-scheduler-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.970858 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t49s\" (UniqueName: \"kubernetes.io/projected/3409451f-c36d-4577-8720-89f4b6dd5ec5-kube-api-access-5t49s\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.970945 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.970975 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-config-data\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 
07:11:47.971026 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-public-tls-certs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971075 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971097 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3409451f-c36d-4577-8720-89f4b6dd5ec5-logs\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971117 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-logs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971181 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c84h\" (UniqueName: \"kubernetes.io/projected/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-kube-api-access-2c84h\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971204 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971222 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971603 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-logs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.971658 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-config-data\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.972128 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3409451f-c36d-4577-8720-89f4b6dd5ec5-logs\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.975362 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.975539 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.975766 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-config-data\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.975774 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.976913 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.977547 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-public-tls-certs\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.978081 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-config-data\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.987703 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t49s\" (UniqueName: \"kubernetes.io/projected/3409451f-c36d-4577-8720-89f4b6dd5ec5-kube-api-access-5t49s\") pod \"nova-metadata-0\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " pod="openstack/nova-metadata-0" Dec 05 07:11:47 crc kubenswrapper[4863]: I1205 07:11:47.987942 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c84h\" (UniqueName: \"kubernetes.io/projected/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-kube-api-access-2c84h\") pod \"nova-api-0\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " pod="openstack/nova-api-0" Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.056151 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.125112 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.165242 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:11:48 crc kubenswrapper[4863]: W1205 07:11:48.625976 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd847c56f_38ef_4aaf_a974_b347f5091038.slice/crio-fcc66e85462c1fe5e8b7294c7f38eb5e87573c0489caa265f6e259472be38d04 WatchSource:0}: Error finding container fcc66e85462c1fe5e8b7294c7f38eb5e87573c0489caa265f6e259472be38d04: Status 404 returned error can't find the container with id fcc66e85462c1fe5e8b7294c7f38eb5e87573c0489caa265f6e259472be38d04 Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.641988 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9458bac9-62a9-45a1-8554-e06e68b71993" path="/var/lib/kubelet/pods/9458bac9-62a9-45a1-8554-e06e68b71993/volumes" Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.642968 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbbcd3a7-48f9-4a33-982f-804c4e17e815" path="/var/lib/kubelet/pods/cbbcd3a7-48f9-4a33-982f-804c4e17e815/volumes" Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.643840 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e322902d-6cc2-4ff1-a549-08380d7e6010" path="/var/lib/kubelet/pods/e322902d-6cc2-4ff1-a549-08380d7e6010/volumes" Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.646173 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.754409 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:11:48 crc kubenswrapper[4863]: I1205 07:11:48.765197 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:11:48 crc kubenswrapper[4863]: W1205 07:11:48.766451 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bcba2d9_46f7_4696_8c9d_3c4b6fea6d2c.slice/crio-f987a136754028cb01eec218aa516f67724f78f8baf0aff8113053bf15c6ef65 WatchSource:0}: Error finding container f987a136754028cb01eec218aa516f67724f78f8baf0aff8113053bf15c6ef65: Status 404 returned error can't find the container with id f987a136754028cb01eec218aa516f67724f78f8baf0aff8113053bf15c6ef65 Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.612130 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3409451f-c36d-4577-8720-89f4b6dd5ec5","Type":"ContainerStarted","Data":"2f4a46d4b57df2d6571ee3d4e278638e04cc4cea822ade66d2b1300772c45f1c"} Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.612505 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3409451f-c36d-4577-8720-89f4b6dd5ec5","Type":"ContainerStarted","Data":"d9b3e236cd29a2fd2e824692a5204ae8a950e3f494022435b0779812ac5c3dd1"} Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.612525 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3409451f-c36d-4577-8720-89f4b6dd5ec5","Type":"ContainerStarted","Data":"3ca4cffe58a91e72355ada98e05e32f145f842cea518e3bbea4d306abb6d490a"} Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.614229 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c","Type":"ContainerStarted","Data":"021d8eac52f783b7221dafaa63c90c000b5e8040770750c9ddb21dc8b652ebeb"} Dec 05 07:11:49 
crc kubenswrapper[4863]: I1205 07:11:49.614272 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c","Type":"ContainerStarted","Data":"61e8197d30bc11087e459d5b563b08a28f784f5a6f60b721e93bb7a88955158e"} Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.614285 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c","Type":"ContainerStarted","Data":"f987a136754028cb01eec218aa516f67724f78f8baf0aff8113053bf15c6ef65"} Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.616121 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d847c56f-38ef-4aaf-a974-b347f5091038","Type":"ContainerStarted","Data":"75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6"} Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.616202 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d847c56f-38ef-4aaf-a974-b347f5091038","Type":"ContainerStarted","Data":"fcc66e85462c1fe5e8b7294c7f38eb5e87573c0489caa265f6e259472be38d04"} Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.632943 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.632924077 podStartE2EDuration="2.632924077s" podCreationTimestamp="2025-12-05 07:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:49.628099161 +0000 UTC m=+1537.354096201" watchObservedRunningTime="2025-12-05 07:11:49.632924077 +0000 UTC m=+1537.358921117" Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.656306 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.656283314 podStartE2EDuration="2.656283314s" podCreationTimestamp="2025-12-05 07:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:49.648165847 +0000 UTC m=+1537.374162907" watchObservedRunningTime="2025-12-05 07:11:49.656283314 +0000 UTC m=+1537.382280354" Dec 05 07:11:49 crc kubenswrapper[4863]: I1205 07:11:49.671183 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.671160014 podStartE2EDuration="2.671160014s" podCreationTimestamp="2025-12-05 07:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:11:49.663430687 +0000 UTC m=+1537.389427737" watchObservedRunningTime="2025-12-05 07:11:49.671160014 +0000 UTC m=+1537.397157054" Dec 05 07:11:53 crc kubenswrapper[4863]: I1205 07:11:53.057041 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 07:11:53 crc kubenswrapper[4863]: I1205 07:11:53.125418 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 07:11:53 crc kubenswrapper[4863]: I1205 07:11:53.125594 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 07:11:58 crc kubenswrapper[4863]: I1205 07:11:58.057203 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 07:11:58 crc 
kubenswrapper[4863]: I1205 07:11:58.088838 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 07:11:58 crc kubenswrapper[4863]: I1205 07:11:58.126165 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 07:11:58 crc kubenswrapper[4863]: I1205 07:11:58.126210 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 07:11:58 crc kubenswrapper[4863]: I1205 07:11:58.166650 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:11:58 crc kubenswrapper[4863]: I1205 07:11:58.166978 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 07:11:58 crc kubenswrapper[4863]: I1205 07:11:58.750287 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 07:11:59 crc kubenswrapper[4863]: I1205 07:11:59.140724 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:59 crc kubenswrapper[4863]: I1205 07:11:59.140752 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:59 crc kubenswrapper[4863]: I1205 07:11:59.180684 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:59 crc kubenswrapper[4863]: I1205 07:11:59.180702 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 07:11:59 crc kubenswrapper[4863]: I1205 07:11:59.816793 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.132085 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.132967 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.143721 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.176041 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.176616 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.177578 4863 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.191235 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.463971 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.464018 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.464055 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.464613 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.464664 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" gracePeriod=600 Dec 05 07:12:08 crc kubenswrapper[4863]: E1205 07:12:08.587530 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.804594 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" exitCode=0 Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.804715 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9"} Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.804793 4863 scope.go:117] "RemoveContainer" containerID="53b1c2ce85bd96c615b503118a891dbcf19dd50f3a5e192bde28113e6752a251" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.805531 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.805728 4863 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 07:12:08 crc kubenswrapper[4863]: E1205 07:12:08.805803 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.813747 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 07:12:08 crc kubenswrapper[4863]: I1205 07:12:08.814908 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 07:12:20 crc kubenswrapper[4863]: I1205 07:12:20.602066 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:12:20 crc kubenswrapper[4863]: E1205 07:12:20.603000 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.382779 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.383527 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="e2a23ba4-22d9-4750-8d39-53dff19bc328" containerName="openstackclient" containerID="cri-o://c2aaeca34f1d08b6ba77903d3594d5cd3cc71ad58a3b1e953f6d010118039c7d" gracePeriod=2 Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.398766 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.749778 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.796344 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinderd676-account-delete-7ks4z"] Dec 05 07:12:27 crc kubenswrapper[4863]: E1205 07:12:27.796835 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a23ba4-22d9-4750-8d39-53dff19bc328" containerName="openstackclient" Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.796852 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a23ba4-22d9-4750-8d39-53dff19bc328" containerName="openstackclient" Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.797053 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a23ba4-22d9-4750-8d39-53dff19bc328" containerName="openstackclient" Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.797783 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.852776 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderd676-account-delete-7ks4z"] Dec 05 07:12:27 crc kubenswrapper[4863]: E1205 07:12:27.875714 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:27 crc kubenswrapper[4863]: E1205 07:12:27.875782 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data podName:c908ae8d-4ec2-4938-819c-0ba2ee26f209 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:28.375761932 +0000 UTC m=+1576.101758972 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data") pod "rabbitmq-cell1-server-0" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209") : configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.953164 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.954733 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="openstack-network-exporter" containerID="cri-o://03c61e5fe864e52cfdadb9c8d6acf54051414a60811bc92e6c566db8db33dd09" gracePeriod=300 Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.978727 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnlrt\" (UniqueName: \"kubernetes.io/projected/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-kube-api-access-vnlrt\") pod \"cinderd676-account-delete-7ks4z\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.978773 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-operator-scripts\") pod \"cinderd676-account-delete-7ks4z\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:27 crc kubenswrapper[4863]: I1205 07:12:27.991098 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-6xkg4"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.012797 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-6xkg4"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.090921 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnlrt\" (UniqueName: \"kubernetes.io/projected/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-kube-api-access-vnlrt\") pod \"cinderd676-account-delete-7ks4z\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.091002 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-operator-scripts\") pod \"cinderd676-account-delete-7ks4z\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " 
pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.091981 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-operator-scripts\") pod \"cinderd676-account-delete-7ks4z\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.122767 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutronadda-account-delete-wpdtm"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.140541 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.194430 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnlrt\" (UniqueName: \"kubernetes.io/projected/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-kube-api-access-vnlrt\") pod \"cinderd676-account-delete-7ks4z\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.195022 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.195701 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsfb7\" (UniqueName: \"kubernetes.io/projected/b5674946-023d-45c0-a0bf-373aa5d7ee65-kube-api-access-nsfb7\") pod \"neutronadda-account-delete-wpdtm\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.195963 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts\") pod \"neutronadda-account-delete-wpdtm\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.212949 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronadda-account-delete-wpdtm"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.238362 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.266531 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement1771-account-delete-h4fq8"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.268116 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.298897 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts\") pod \"neutronadda-account-delete-wpdtm\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.298963 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsfb7\" (UniqueName: \"kubernetes.io/projected/b5674946-023d-45c0-a0bf-373aa5d7ee65-kube-api-access-nsfb7\") pod \"neutronadda-account-delete-wpdtm\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.299782 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts\") pod \"neutronadda-account-delete-wpdtm\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.309539 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.309917 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="openstack-network-exporter" containerID="cri-o://65f72860b8f159ae7a8d700f8a8fae701459f9c4e811682a22537f30d3cca929" gracePeriod=300 Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.333290 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement1771-account-delete-h4fq8"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.377528 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-7xsrt"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.385185 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsfb7\" (UniqueName: \"kubernetes.io/projected/b5674946-023d-45c0-a0bf-373aa5d7ee65-kube-api-access-nsfb7\") pod \"neutronadda-account-delete-wpdtm\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.392508 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="ovsdbserver-nb" containerID="cri-o://0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64" gracePeriod=300 Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.399862 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="ovsdbserver-sb" containerID="cri-o://5859f9a814df63156a5f73e63f537579a8e080d8ff53756995610a17d058be68" gracePeriod=300 Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.401025 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2w4t\" (UniqueName: \"kubernetes.io/projected/afae6292-c1df-4dd3-abec-d1f493c03857-kube-api-access-p2w4t\") pod 
\"placement1771-account-delete-h4fq8\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.401098 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afae6292-c1df-4dd3-abec-d1f493c03857-operator-scripts\") pod \"placement1771-account-delete-h4fq8\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: E1205 07:12:28.402057 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:28 crc kubenswrapper[4863]: E1205 07:12:28.402179 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data podName:c908ae8d-4ec2-4938-819c-0ba2ee26f209 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:29.402163629 +0000 UTC m=+1577.128160669 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data") pod "rabbitmq-cell1-server-0" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209") : configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:28 crc kubenswrapper[4863]: E1205 07:12:28.402408 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 07:12:28 crc kubenswrapper[4863]: E1205 07:12:28.402453 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data podName:46586650-4568-4f5e-9854-30f6e0291b6b nodeName:}" failed. No retries permitted until 2025-12-05 07:12:28.902441436 +0000 UTC m=+1576.628438466 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data") pod "rabbitmq-server-0" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b") : configmap "rabbitmq-config-data" not found Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.406419 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-7xsrt"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.469650 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican6b9e-account-delete-7nrpl"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.471342 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.503816 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2w4t\" (UniqueName: \"kubernetes.io/projected/afae6292-c1df-4dd3-abec-d1f493c03857-kube-api-access-p2w4t\") pod \"placement1771-account-delete-h4fq8\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.503888 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afae6292-c1df-4dd3-abec-d1f493c03857-operator-scripts\") pod \"placement1771-account-delete-h4fq8\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.504784 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afae6292-c1df-4dd3-abec-d1f493c03857-operator-scripts\") pod \"placement1771-account-delete-h4fq8\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.513662 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican6b9e-account-delete-7nrpl"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.565157 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2w4t\" (UniqueName: \"kubernetes.io/projected/afae6292-c1df-4dd3-abec-d1f493c03857-kube-api-access-p2w4t\") pod \"placement1771-account-delete-h4fq8\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.565619 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.586669 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.604540 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glanceddac-account-delete-hlrff"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.605872 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.623003 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts\") pod \"barbican6b9e-account-delete-7nrpl\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.623179 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6h9x\" (UniqueName: \"kubernetes.io/projected/7b21df5e-065a-4c62-b271-704c86b97f58-kube-api-access-l6h9x\") pod \"barbican6b9e-account-delete-7nrpl\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.727945 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83a84e66-3bc2-4629-b251-40287f224f1b" path="/var/lib/kubelet/pods/83a84e66-3bc2-4629-b251-40287f224f1b/volumes" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.739687 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl4nc\" (UniqueName: \"kubernetes.io/projected/d9d1ac32-bc45-41a0-b696-034ff92b13d4-kube-api-access-pl4nc\") pod \"glanceddac-account-delete-hlrff\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.739750 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts\") pod \"glanceddac-account-delete-hlrff\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.739929 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts\") pod \"barbican6b9e-account-delete-7nrpl\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.740015 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6h9x\" (UniqueName: \"kubernetes.io/projected/7b21df5e-065a-4c62-b271-704c86b97f58-kube-api-access-l6h9x\") pod \"barbican6b9e-account-delete-7nrpl\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.745639 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts\") pod \"barbican6b9e-account-delete-7nrpl\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.746293 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="853fd865-612f-4875-8c38-c6d67e486c0e" path="/var/lib/kubelet/pods/853fd865-612f-4875-8c38-c6d67e486c0e/volumes" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.747275 4863 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glanceddac-account-delete-hlrff"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.747313 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-cfznd"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.747328 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-cfznd"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.770194 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.770426 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="ovn-northd" containerID="cri-o://8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" gracePeriod=30 Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.770930 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="openstack-network-exporter" containerID="cri-o://52e26abc5b133fb5aeaaf4dccc14824d17b6a85c491f8151e997e9bfef541884" gracePeriod=30 Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.776073 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6h9x\" (UniqueName: \"kubernetes.io/projected/7b21df5e-065a-4c62-b271-704c86b97f58-kube-api-access-l6h9x\") pod \"barbican6b9e-account-delete-7nrpl\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.788841 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-dgwg4"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.801936 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-dgwg4"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.816920 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-vcb5r"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.850710 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl4nc\" (UniqueName: \"kubernetes.io/projected/d9d1ac32-bc45-41a0-b696-034ff92b13d4-kube-api-access-pl4nc\") pod \"glanceddac-account-delete-hlrff\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.850771 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts\") pod \"glanceddac-account-delete-hlrff\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.852649 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-nsmzq"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.865846 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts\") pod \"glanceddac-account-delete-hlrff\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:28 crc kubenswrapper[4863]: 
I1205 07:12:28.873130 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-vcb5r"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.899635 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-48nzq"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.899869 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-48nzq" podUID="cc4802ae-16f2-4b9e-a153-b48e9c8325b8" containerName="openstack-network-exporter" containerID="cri-o://95a3d02a39520c799ce763e3863b5a722c2d59557d6dc506840feb931175f0ff" gracePeriod=30 Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.903136 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lvrb5"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.911554 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.915384 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl4nc\" (UniqueName: \"kubernetes.io/projected/d9d1ac32-bc45-41a0-b696-034ff92b13d4-kube-api-access-pl4nc\") pod \"glanceddac-account-delete-hlrff\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.926905 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapia811-account-delete-9vntl"] Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.928187 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:28 crc kubenswrapper[4863]: E1205 07:12:28.970379 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 07:12:28 crc kubenswrapper[4863]: E1205 07:12:28.970454 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data podName:46586650-4568-4f5e-9854-30f6e0291b6b nodeName:}" failed. No retries permitted until 2025-12-05 07:12:29.970434891 +0000 UTC m=+1577.696431931 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data") pod "rabbitmq-server-0" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b") : configmap "rabbitmq-config-data" not found Dec 05 07:12:28 crc kubenswrapper[4863]: I1205 07:12:28.973351 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapia811-account-delete-9vntl"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.016671 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell0ed59-account-delete-v9rg9"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.023184 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.027111 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-4xpt9"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.037817 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-4xpt9"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.038181 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.048598 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0ed59-account-delete-v9rg9"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.072912 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf4ff87b5-5kjhr"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.073778 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" podUID="753801f8-f439-415b-9674-08d58e53def8" containerName="dnsmasq-dns" containerID="cri-o://415e080e8217329e1a50efac7ddcd01bbbc76c58058e18c23ff48ed51c7a70ce" gracePeriod=10 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.083798 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts\") pod \"novaapia811-account-delete-9vntl\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.084086 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7mx6\" (UniqueName: \"kubernetes.io/projected/81284a21-5f4d-4135-b08e-94415569eb09-kube-api-access-w7mx6\") pod \"novaapia811-account-delete-9vntl\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.110356 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_132b3c8e-e25f-44ee-9d67-eccb0c2f8f91/ovsdbserver-nb/0.log" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.110407 4863 generic.go:334] "Generic (PLEG): container finished" podID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerID="65f72860b8f159ae7a8d700f8a8fae701459f9c4e811682a22537f30d3cca929" exitCode=2 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.110426 4863 generic.go:334] "Generic (PLEG): container finished" podID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerID="0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64" exitCode=143 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.113612 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91","Type":"ContainerDied","Data":"65f72860b8f159ae7a8d700f8a8fae701459f9c4e811682a22537f30d3cca929"} Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.113767 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91","Type":"ContainerDied","Data":"0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64"} Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.159537 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.161627 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="cinder-scheduler" containerID="cri-o://95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.162086 4863 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="probe" containerID="cri-o://cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.183128 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cfff4892-c0b7-411f-9921-329db358dcde/ovsdbserver-sb/0.log" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.183282 4863 generic.go:334] "Generic (PLEG): container finished" podID="cfff4892-c0b7-411f-9921-329db358dcde" containerID="03c61e5fe864e52cfdadb9c8d6acf54051414a60811bc92e6c566db8db33dd09" exitCode=2 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.183388 4863 generic.go:334] "Generic (PLEG): container finished" podID="cfff4892-c0b7-411f-9921-329db358dcde" containerID="5859f9a814df63156a5f73e63f537579a8e080d8ff53756995610a17d058be68" exitCode=143 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.183559 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfff4892-c0b7-411f-9921-329db358dcde","Type":"ContainerDied","Data":"03c61e5fe864e52cfdadb9c8d6acf54051414a60811bc92e6c566db8db33dd09"} Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.183680 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfff4892-c0b7-411f-9921-329db358dcde","Type":"ContainerDied","Data":"5859f9a814df63156a5f73e63f537579a8e080d8ff53756995610a17d058be68"} Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.223985 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64 is running failed: container process not found" containerID="0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.224643 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64 is running failed: container process not found" containerID="0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.224928 4863 generic.go:334] "Generic (PLEG): container finished" podID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerID="52e26abc5b133fb5aeaaf4dccc14824d17b6a85c491f8151e997e9bfef541884" exitCode=2 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.225015 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"51b09f18-7196-4b58-b4a9-29671ae5a243","Type":"ContainerDied","Data":"52e26abc5b133fb5aeaaf4dccc14824d17b6a85c491f8151e997e9bfef541884"} Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.225078 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64 is running failed: container process not found" containerID="0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.225106 4863 prober.go:104] "Probe 
errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="ovsdbserver-nb" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.260776 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts\") pod \"novacell0ed59-account-delete-v9rg9\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.260829 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptfpp\" (UniqueName: \"kubernetes.io/projected/e15a3f00-fce6-490e-9b6b-ca28d8334d25-kube-api-access-ptfpp\") pod \"novacell0ed59-account-delete-v9rg9\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.260939 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7mx6\" (UniqueName: \"kubernetes.io/projected/81284a21-5f4d-4135-b08e-94415569eb09-kube-api-access-w7mx6\") pod \"novaapia811-account-delete-9vntl\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.261122 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts\") pod \"novaapia811-account-delete-9vntl\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.274141 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts\") pod \"novaapia811-account-delete-9vntl\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.283543 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.283817 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api-log" containerID="cri-o://020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.284612 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api" containerID="cri-o://8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.306740 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-659b8866f7-wfh8q"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.307200 4863 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/neutron-659b8866f7-wfh8q" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-api" containerID="cri-o://3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.307321 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-659b8866f7-wfh8q" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-httpd" containerID="cri-o://026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.315644 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7mx6\" (UniqueName: \"kubernetes.io/projected/81284a21-5f4d-4135-b08e-94415569eb09-kube-api-access-w7mx6\") pod \"novaapia811-account-delete-9vntl\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.354026 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-54d5bbb8cd-6z8nd"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.354360 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-54d5bbb8cd-6z8nd" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-log" containerID="cri-o://de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.354857 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-54d5bbb8cd-6z8nd" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-api" containerID="cri-o://ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.375536 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts\") pod \"novacell0ed59-account-delete-v9rg9\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.375632 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptfpp\" (UniqueName: \"kubernetes.io/projected/e15a3f00-fce6-490e-9b6b-ca28d8334d25-kube-api-access-ptfpp\") pod \"novacell0ed59-account-delete-v9rg9\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.387731 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts\") pod \"novacell0ed59-account-delete-v9rg9\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.389509 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.389960 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-server" 
containerID="cri-o://cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390370 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="swift-recon-cron" containerID="cri-o://e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390408 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="rsync" containerID="cri-o://17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390439 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-expirer" containerID="cri-o://84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390483 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-updater" containerID="cri-o://e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390521 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-auditor" containerID="cri-o://471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390564 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-replicator" containerID="cri-o://eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390593 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-server" containerID="cri-o://28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390624 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-updater" containerID="cri-o://f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390655 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-auditor" containerID="cri-o://25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390683 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-replicator" 
containerID="cri-o://98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390709 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-server" containerID="cri-o://25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390739 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-reaper" containerID="cri-o://0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390768 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-auditor" containerID="cri-o://216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.390797 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-replicator" containerID="cri-o://24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.398975 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.401083 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptfpp\" (UniqueName: \"kubernetes.io/projected/e15a3f00-fce6-490e-9b6b-ca28d8334d25-kube-api-access-ptfpp\") pod \"novacell0ed59-account-delete-v9rg9\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.475816 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-tzwm5"] Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.478970 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.479033 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data podName:c908ae8d-4ec2-4938-819c-0ba2ee26f209 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:31.479008437 +0000 UTC m=+1579.205005477 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data") pod "rabbitmq-cell1-server-0" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209") : configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.528043 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-tzwm5"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.638625 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-p4rsg"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.647287 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.664746 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-p4rsg"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.720027 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.737992 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.752716 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-log" containerID="cri-o://0e90f617aa6517298bfd7f2ba20833ae5a734185ffcd027d050e5645935a88c4" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.753370 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-httpd" containerID="cri-o://45f9964a635141593c0ecd0b472b9d0197658218a0e29d30f979c3117ddf5090" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.763296 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.763580 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-log" containerID="cri-o://a6a3f3263a8ecdd3d1951f70646009a4f53fd5264aa1aae420054b8eeff1e7cd" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.763735 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-httpd" containerID="cri-o://1fb7878f5faa41f5bfe6c62080b32d439375f5649baed85652cf33bd1cdfde23" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.775899 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.776199 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-log" containerID="cri-o://61e8197d30bc11087e459d5b563b08a28f784f5a6f60b721e93bb7a88955158e" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.776361 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-api" containerID="cri-o://021d8eac52f783b7221dafaa63c90c000b5e8040770750c9ddb21dc8b652ebeb" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.794694 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.795419 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d847c56f-38ef-4aaf-a974-b347f5091038" containerName="nova-scheduler-scheduler" containerID="cri-o://75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: 
I1205 07:12:29.814679 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.822059 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerName="rabbitmq" containerID="cri-o://60b413aacbb400385f318f7b88f361cca235df215614238ffe8edc91b3a7e32f" gracePeriod=604800 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.846547 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.862092 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.862302 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-log" containerID="cri-o://d9b3e236cd29a2fd2e824692a5204ae8a950e3f494022435b0779812ac5c3dd1" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.862457 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-metadata" containerID="cri-o://2f4a46d4b57df2d6571ee3d4e278638e04cc4cea822ade66d2b1300772c45f1c" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.866510 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-776db75b76-jmjll"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.866779 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-776db75b76-jmjll" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api-log" containerID="cri-o://07329e6fbf3ef6acbf027dc83e5cf92b53de3edc0cffa4a41162cabe931ecb30" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.867907 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-776db75b76-jmjll" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api" containerID="cri-o://9ee5b5095f60b3fcf7b47a34050adc8fc3929d01b34264ecd58fb01bc25bd387" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.873660 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-594bb7dbb9-862q2"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.873968 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-594bb7dbb9-862q2" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker-log" containerID="cri-o://2d5bab542db02ef97af154097578dd404d6d1379735eb1ccb0151edc08a521c2" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.874120 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-594bb7dbb9-862q2" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker" containerID="cri-o://de7ae6772610824286f4a36b1feda8b4ab485b788e3f580e7258992e82f32077" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.884198 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-3b11-account-create-update-st5zs"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.898200 4863 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openstack/neutron-659b8866f7-wfh8q" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.149:9696/\": read tcp 10.217.0.2:51402->10.217.0.149:9696: read: connection reset by peer" Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.915061 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-bd7bc7b54-vzjx4"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.915991 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener-log" containerID="cri-o://8c2f71b77923ff40514479e86a2a93d8a6db31c3b4b1aa7bee31460d5cfebb4a" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.916322 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener" containerID="cri-o://f9cbbd75cf4e441651ca07035330ebfbbf0163c249a21a5f6b8d9986b0edde6a" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.924149 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.924344 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="79d93942-6ec7-4fea-9e05-a9c831ad3dd3" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d" gracePeriod=30 Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.949654 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-7j8b6"] Dec 05 07:12:29 crc kubenswrapper[4863]: I1205 07:12:29.969721 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-3b11-account-create-update-st5zs"] Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.994988 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 07:12:29 crc kubenswrapper[4863]: E1205 07:12:29.995053 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data podName:46586650-4568-4f5e-9854-30f6e0291b6b nodeName:}" failed. No retries permitted until 2025-12-05 07:12:31.995034632 +0000 UTC m=+1579.721031672 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data") pod "rabbitmq-server-0" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b") : configmap "rabbitmq-config-data" not found Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.011702 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-7j8b6"] Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.068818 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="46586650-4568-4f5e-9854-30f6e0291b6b" containerName="rabbitmq" containerID="cri-o://7d926cefd2b8f2782d82e3be6c2ab3724e1e4e1d45592a80014e9eb9c83211c5" gracePeriod=604800 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.135702 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mdf66"] Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.159577 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mdf66"] Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.189410 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.189669 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" containerName="nova-cell1-conductor-conductor" containerID="cri-o://5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758" gracePeriod=30 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.213514 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.213757 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="ef02ff71-0212-4b81-8243-18e2d28b828e" containerName="nova-cell0-conductor-conductor" containerID="cri-o://757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" gracePeriod=30 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.237833 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-txgnc"] Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.264017 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-txgnc"] Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.314401 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="fd957476-007c-4882-8449-96deebe6a63c" containerName="galera" containerID="cri-o://c82e5e7750a9dba29e1d0474f6d5fc28daeb3e913391aab3c135eb318d4d0076" gracePeriod=30 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354065 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354098 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354105 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" 
containerID="e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354111 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354118 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354124 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354130 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354136 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354141 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354147 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354153 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354215 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354240 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354251 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354259 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354267 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354276 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354284 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354293 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354302 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354310 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.354319 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.396317 4863 generic.go:334] "Generic (PLEG): container finished" podID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerID="020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.396394 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a46c1ea-72b2-4dfd-a073-72f82617ce76","Type":"ContainerDied","Data":"020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.415418 4863 generic.go:334] "Generic (PLEG): container finished" podID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerID="026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.415530 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-659b8866f7-wfh8q" event={"ID":"fcb2529a-46f2-4b17-bb95-8ef2a119f222","Type":"ContainerDied","Data":"026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.427716 4863 generic.go:334] "Generic (PLEG): container finished" podID="e2a23ba4-22d9-4750-8d39-53dff19bc328" containerID="c2aaeca34f1d08b6ba77903d3594d5cd3cc71ad58a3b1e953f6d010118039c7d" exitCode=137 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.451088 4863 generic.go:334] "Generic (PLEG): container finished" podID="046f9d47-6b50-473f-838f-8375b6fe6389" 
containerID="de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.451190 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-54d5bbb8cd-6z8nd" event={"ID":"046f9d47-6b50-473f-838f-8375b6fe6389","Type":"ContainerDied","Data":"de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.457513 4863 generic.go:334] "Generic (PLEG): container finished" podID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerID="2d5bab542db02ef97af154097578dd404d6d1379735eb1ccb0151edc08a521c2" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.457577 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-594bb7dbb9-862q2" event={"ID":"97b9e3bc-115e-4613-9e5e-4cf44651585e","Type":"ContainerDied","Data":"2d5bab542db02ef97af154097578dd404d6d1379735eb1ccb0151edc08a521c2"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.460390 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_132b3c8e-e25f-44ee-9d67-eccb0c2f8f91/ovsdbserver-nb/0.log" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.460482 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.465332 4863 generic.go:334] "Generic (PLEG): container finished" podID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerID="a6a3f3263a8ecdd3d1951f70646009a4f53fd5264aa1aae420054b8eeff1e7cd" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.465439 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0752f5c6-12cf-4208-b523-f970b63f1b4b","Type":"ContainerDied","Data":"a6a3f3263a8ecdd3d1951f70646009a4f53fd5264aa1aae420054b8eeff1e7cd"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.532153 4863 generic.go:334] "Generic (PLEG): container finished" podID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerID="d9b3e236cd29a2fd2e824692a5204ae8a950e3f494022435b0779812ac5c3dd1" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.532239 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3409451f-c36d-4577-8720-89f4b6dd5ec5","Type":"ContainerDied","Data":"d9b3e236cd29a2fd2e824692a5204ae8a950e3f494022435b0779812ac5c3dd1"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.570425 4863 generic.go:334] "Generic (PLEG): container finished" podID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerID="8c2f71b77923ff40514479e86a2a93d8a6db31c3b4b1aa7bee31460d5cfebb4a" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.570501 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" event={"ID":"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6","Type":"ContainerDied","Data":"8c2f71b77923ff40514479e86a2a93d8a6db31c3b4b1aa7bee31460d5cfebb4a"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.572278 4863 generic.go:334] "Generic (PLEG): container finished" podID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerID="0e90f617aa6517298bfd7f2ba20833ae5a734185ffcd027d050e5645935a88c4" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.572325 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"5c1e2892-03e9-4f09-84ce-0c91842108cc","Type":"ContainerDied","Data":"0e90f617aa6517298bfd7f2ba20833ae5a734185ffcd027d050e5645935a88c4"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.575033 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_132b3c8e-e25f-44ee-9d67-eccb0c2f8f91/ovsdbserver-nb/0.log" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.575327 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91","Type":"ContainerDied","Data":"a2a6bf8802b25dc77e1526161ae9751253ebc180e4bb7dc962f60aa72b7997a6"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.575362 4863 scope.go:117] "RemoveContainer" containerID="65f72860b8f159ae7a8d700f8a8fae701459f9c4e811682a22537f30d3cca929" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.575503 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 07:12:30 crc kubenswrapper[4863]: E1205 07:12:30.592719 4863 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 05 07:12:30 crc kubenswrapper[4863]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 05 07:12:30 crc kubenswrapper[4863]: + source /usr/local/bin/container-scripts/functions Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNBridge=br-int Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNRemote=tcp:localhost:6642 Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNEncapType=geneve Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNAvailabilityZones= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ EnableChassisAsGateway=true Dec 05 07:12:30 crc kubenswrapper[4863]: ++ PhysicalNetworks= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNHostName= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 05 07:12:30 crc kubenswrapper[4863]: ++ ovs_dir=/var/lib/openvswitch Dec 05 07:12:30 crc kubenswrapper[4863]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 05 07:12:30 crc kubenswrapper[4863]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 05 07:12:30 crc kubenswrapper[4863]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + sleep 0.5 Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + sleep 0.5 Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + cleanup_ovsdb_server_semaphore Dec 05 07:12:30 crc kubenswrapper[4863]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 07:12:30 crc kubenswrapper[4863]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 05 07:12:30 crc kubenswrapper[4863]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-nsmzq" message=< Dec 05 07:12:30 crc kubenswrapper[4863]: Exiting ovsdb-server (5) [ OK ] Dec 05 07:12:30 crc kubenswrapper[4863]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 05 07:12:30 crc kubenswrapper[4863]: + source /usr/local/bin/container-scripts/functions Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNBridge=br-int Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNRemote=tcp:localhost:6642 Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNEncapType=geneve Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNAvailabilityZones= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ EnableChassisAsGateway=true Dec 05 07:12:30 crc kubenswrapper[4863]: ++ PhysicalNetworks= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNHostName= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 05 07:12:30 crc kubenswrapper[4863]: ++ ovs_dir=/var/lib/openvswitch Dec 05 07:12:30 crc kubenswrapper[4863]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 05 07:12:30 crc kubenswrapper[4863]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 05 07:12:30 crc kubenswrapper[4863]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + sleep 0.5 Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + sleep 0.5 Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + cleanup_ovsdb_server_semaphore Dec 05 07:12:30 crc kubenswrapper[4863]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 07:12:30 crc kubenswrapper[4863]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 05 07:12:30 crc kubenswrapper[4863]: > Dec 05 07:12:30 crc kubenswrapper[4863]: E1205 07:12:30.592766 4863 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 05 07:12:30 crc kubenswrapper[4863]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 05 07:12:30 crc kubenswrapper[4863]: + source /usr/local/bin/container-scripts/functions Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNBridge=br-int Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNRemote=tcp:localhost:6642 Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNEncapType=geneve Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNAvailabilityZones= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ EnableChassisAsGateway=true Dec 05 07:12:30 crc kubenswrapper[4863]: ++ PhysicalNetworks= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ OVNHostName= Dec 05 07:12:30 crc kubenswrapper[4863]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 05 07:12:30 crc kubenswrapper[4863]: ++ ovs_dir=/var/lib/openvswitch Dec 05 07:12:30 crc kubenswrapper[4863]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 05 07:12:30 crc kubenswrapper[4863]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 05 07:12:30 crc kubenswrapper[4863]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + sleep 0.5 Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + sleep 0.5 Dec 05 07:12:30 crc kubenswrapper[4863]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 07:12:30 crc kubenswrapper[4863]: + cleanup_ovsdb_server_semaphore Dec 05 07:12:30 crc kubenswrapper[4863]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 07:12:30 crc kubenswrapper[4863]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 05 07:12:30 crc kubenswrapper[4863]: > pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" containerID="cri-o://4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.592800 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" containerID="cri-o://4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" gracePeriod=29 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.606144 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-48nzq_cc4802ae-16f2-4b9e-a153-b48e9c8325b8/openstack-network-exporter/0.log" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.606186 4863 generic.go:334] "Generic (PLEG): container finished" podID="cc4802ae-16f2-4b9e-a153-b48e9c8325b8" containerID="95a3d02a39520c799ce763e3863b5a722c2d59557d6dc506840feb931175f0ff" exitCode=2 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.616317 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03722336-ca3d-42e4-95d8-f9fd1c092124" path="/var/lib/kubelet/pods/03722336-ca3d-42e4-95d8-f9fd1c092124/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.616832 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40cca709-0714-44d5-9105-02eab2284e98" path="/var/lib/kubelet/pods/40cca709-0714-44d5-9105-02eab2284e98/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.617770 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63784fd1-baa1-4334-96cf-b1467c661030" path="/var/lib/kubelet/pods/63784fd1-baa1-4334-96cf-b1467c661030/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.618291 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9723e99f-fe23-4e42-a037-c31bf410036a" path="/var/lib/kubelet/pods/9723e99f-fe23-4e42-a037-c31bf410036a/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.618848 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9817e0ed-1998-43c8-a8e7-b9f94d58d433" path="/var/lib/kubelet/pods/9817e0ed-1998-43c8-a8e7-b9f94d58d433/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.619770 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dd9d764-3ac5-4749-a947-5a61426ae7da" path="/var/lib/kubelet/pods/9dd9d764-3ac5-4749-a947-5a61426ae7da/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.620499 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8b5f081-158a-4d6c-a16f-c1b90548ee63" path="/var/lib/kubelet/pods/a8b5f081-158a-4d6c-a16f-c1b90548ee63/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.621157 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6b7f621-27c8-4603-9de2-23f2fcec007b" path="/var/lib/kubelet/pods/b6b7f621-27c8-4603-9de2-23f2fcec007b/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.621745 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="b7e691e7-81f8-4d74-8d20-f679070e3321" path="/var/lib/kubelet/pods/b7e691e7-81f8-4d74-8d20-f679070e3321/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.621914 4863 generic.go:334] "Generic (PLEG): container finished" podID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerID="07329e6fbf3ef6acbf027dc83e5cf92b53de3edc0cffa4a41162cabe931ecb30" exitCode=143 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.624038 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.624660 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd764eca-5968-479c-9a85-34360cc81ee2" path="/var/lib/kubelet/pods/bd764eca-5968-479c-9a85-34360cc81ee2/volumes" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.642129 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-48nzq" event={"ID":"cc4802ae-16f2-4b9e-a153-b48e9c8325b8","Type":"ContainerDied","Data":"95a3d02a39520c799ce763e3863b5a722c2d59557d6dc506840feb931175f0ff"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.642227 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-776db75b76-jmjll" event={"ID":"92c796ad-a73e-4924-a59f-05031fcbb9d0","Type":"ContainerDied","Data":"07329e6fbf3ef6acbf027dc83e5cf92b53de3edc0cffa4a41162cabe931ecb30"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649290 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-combined-ca-bundle\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649437 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-scripts\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649545 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn4jk\" (UniqueName: \"kubernetes.io/projected/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-kube-api-access-dn4jk\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649605 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-metrics-certs-tls-certs\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649644 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649678 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdbserver-nb-tls-certs\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: 
\"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649718 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-config\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.649756 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdb-rundir\") pod \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\" (UID: \"132b3c8e-e25f-44ee-9d67-eccb0c2f8f91\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.661528 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-config" (OuterVolumeSpecName: "config") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.666504 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.670133 4863 scope.go:117] "RemoveContainer" containerID="0166dda26bbe4b5acc8fab3357bb2a05507c806617a6ca6f1d94bc6e8db03c64" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.678180 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" containerID="cri-o://7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" gracePeriod=29 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.692887 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-kube-api-access-dn4jk" (OuterVolumeSpecName: "kube-api-access-dn4jk") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "kube-api-access-dn4jk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.699153 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-scripts" (OuterVolumeSpecName: "scripts") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.712644 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.716644 4863 generic.go:334] "Generic (PLEG): container finished" podID="753801f8-f439-415b-9674-08d58e53def8" containerID="415e080e8217329e1a50efac7ddcd01bbbc76c58058e18c23ff48ed51c7a70ce" exitCode=0 Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.716734 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" event={"ID":"753801f8-f439-415b-9674-08d58e53def8","Type":"ContainerDied","Data":"415e080e8217329e1a50efac7ddcd01bbbc76c58058e18c23ff48ed51c7a70ce"} Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.741590 4863 scope.go:117] "RemoveContainer" containerID="415e080e8217329e1a50efac7ddcd01bbbc76c58058e18c23ff48ed51c7a70ce" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.761661 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-nb\") pod \"753801f8-f439-415b-9674-08d58e53def8\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.761752 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-sb\") pod \"753801f8-f439-415b-9674-08d58e53def8\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.761773 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-swift-storage-0\") pod \"753801f8-f439-415b-9674-08d58e53def8\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.761805 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjv9m\" (UniqueName: \"kubernetes.io/projected/753801f8-f439-415b-9674-08d58e53def8-kube-api-access-zjv9m\") pod \"753801f8-f439-415b-9674-08d58e53def8\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.761901 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-config\") pod \"753801f8-f439-415b-9674-08d58e53def8\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.761981 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-svc\") pod \"753801f8-f439-415b-9674-08d58e53def8\" (UID: \"753801f8-f439-415b-9674-08d58e53def8\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.763114 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.763136 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.763146 4863 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-dn4jk\" (UniqueName: \"kubernetes.io/projected/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-kube-api-access-dn4jk\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.763165 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.763174 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.799484 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.819913 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cfff4892-c0b7-411f-9921-329db358dcde/ovsdbserver-sb/0.log" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.819993 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.834937 4863 scope.go:117] "RemoveContainer" containerID="440dcc78223ce0c66f6100e309a5a5f235b573f32391c1fa38a3b6037c634c90" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.869437 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.881767 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/753801f8-f439-415b-9674-08d58e53def8-kube-api-access-zjv9m" (OuterVolumeSpecName: "kube-api-access-zjv9m") pod "753801f8-f439-415b-9674-08d58e53def8" (UID: "753801f8-f439-415b-9674-08d58e53def8"). InnerVolumeSpecName "kube-api-access-zjv9m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.885234 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970507 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-metrics-certs-tls-certs\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970588 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-ovsdbserver-sb-tls-certs\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970620 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-config\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970641 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-scripts\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970691 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970778 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz8j9\" (UniqueName: \"kubernetes.io/projected/cfff4892-c0b7-411f-9921-329db358dcde-kube-api-access-tz8j9\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970841 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-combined-ca-bundle\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.970900 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfff4892-c0b7-411f-9921-329db358dcde-ovsdb-rundir\") pod \"cfff4892-c0b7-411f-9921-329db358dcde\" (UID: \"cfff4892-c0b7-411f-9921-329db358dcde\") " Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.971326 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjv9m\" (UniqueName: \"kubernetes.io/projected/753801f8-f439-415b-9674-08d58e53def8-kube-api-access-zjv9m\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.971340 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.973208 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfff4892-c0b7-411f-9921-329db358dcde-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.974053 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-scripts" (OuterVolumeSpecName: "scripts") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.975167 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-config" (OuterVolumeSpecName: "config") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.978757 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:30 crc kubenswrapper[4863]: I1205 07:12:30.981118 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfff4892-c0b7-411f-9921-329db358dcde-kube-api-access-tz8j9" (OuterVolumeSpecName: "kube-api-access-tz8j9") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "kube-api-access-tz8j9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.009986 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.025019 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-48nzq_cc4802ae-16f2-4b9e-a153-b48e9c8325b8/openstack-network-exporter/0.log" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.025071 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.025523 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-config" (OuterVolumeSpecName: "config") pod "753801f8-f439-415b-9674-08d58e53def8" (UID: "753801f8-f439-415b-9674-08d58e53def8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.049727 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.070197 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "753801f8-f439-415b-9674-08d58e53def8" (UID: "753801f8-f439-415b-9674-08d58e53def8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.076614 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.076652 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfff4892-c0b7-411f-9921-329db358dcde-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.076665 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.076676 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfff4892-c0b7-411f-9921-329db358dcde-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.077060 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.079376 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.079406 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz8j9\" (UniqueName: \"kubernetes.io/projected/cfff4892-c0b7-411f-9921-329db358dcde-kube-api-access-tz8j9\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.079420 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.111681 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderd676-account-delete-7ks4z"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.180929 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-config\") pod \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\" (UID: 
\"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181276 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-combined-ca-bundle\") pod \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181699 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config-secret\") pod \"e2a23ba4-22d9-4750-8d39-53dff19bc328\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181739 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovs-rundir\") pod \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181800 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdhkj\" (UniqueName: \"kubernetes.io/projected/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-kube-api-access-wdhkj\") pod \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181869 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovn-rundir\") pod \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181927 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-metrics-certs-tls-certs\") pod \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\" (UID: \"cc4802ae-16f2-4b9e-a153-b48e9c8325b8\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181950 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-combined-ca-bundle\") pod \"e2a23ba4-22d9-4750-8d39-53dff19bc328\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.181974 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config\") pod \"e2a23ba4-22d9-4750-8d39-53dff19bc328\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.182111 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk64l\" (UniqueName: \"kubernetes.io/projected/e2a23ba4-22d9-4750-8d39-53dff19bc328-kube-api-access-kk64l\") pod \"e2a23ba4-22d9-4750-8d39-53dff19bc328\" (UID: \"e2a23ba4-22d9-4750-8d39-53dff19bc328\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.182570 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-config" (OuterVolumeSpecName: "config") pod 
"cc4802ae-16f2-4b9e-a153-b48e9c8325b8" (UID: "cc4802ae-16f2-4b9e-a153-b48e9c8325b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.182617 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "cc4802ae-16f2-4b9e-a153-b48e9c8325b8" (UID: "cc4802ae-16f2-4b9e-a153-b48e9c8325b8"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.184383 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "cc4802ae-16f2-4b9e-a153-b48e9c8325b8" (UID: "cc4802ae-16f2-4b9e-a153-b48e9c8325b8"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.193572 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.198950 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-kube-api-access-wdhkj" (OuterVolumeSpecName: "kube-api-access-wdhkj") pod "cc4802ae-16f2-4b9e-a153-b48e9c8325b8" (UID: "cc4802ae-16f2-4b9e-a153-b48e9c8325b8"). InnerVolumeSpecName "kube-api-access-wdhkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.209436 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "753801f8-f439-415b-9674-08d58e53def8" (UID: "753801f8-f439-415b-9674-08d58e53def8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.209995 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a23ba4-22d9-4750-8d39-53dff19bc328-kube-api-access-kk64l" (OuterVolumeSpecName: "kube-api-access-kk64l") pod "e2a23ba4-22d9-4750-8d39-53dff19bc328" (UID: "e2a23ba4-22d9-4750-8d39-53dff19bc328"). InnerVolumeSpecName "kube-api-access-kk64l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.211994 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" (UID: "132b3c8e-e25f-44ee-9d67-eccb0c2f8f91"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.258082 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.278689 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "753801f8-f439-415b-9674-08d58e53def8" (UID: "753801f8-f439-415b-9674-08d58e53def8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284917 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284944 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk64l\" (UniqueName: \"kubernetes.io/projected/e2a23ba4-22d9-4750-8d39-53dff19bc328-kube-api-access-kk64l\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284955 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284963 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284972 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284980 4863 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovs-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284988 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdhkj\" (UniqueName: \"kubernetes.io/projected/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-kube-api-access-wdhkj\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.284996 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.285004 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.285014 4863 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.290676 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "753801f8-f439-415b-9674-08d58e53def8" (UID: "753801f8-f439-415b-9674-08d58e53def8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.302397 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc4802ae-16f2-4b9e-a153-b48e9c8325b8" (UID: "cc4802ae-16f2-4b9e-a153-b48e9c8325b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.311824 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2a23ba4-22d9-4750-8d39-53dff19bc328" (UID: "e2a23ba4-22d9-4750-8d39-53dff19bc328"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.357756 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e2a23ba4-22d9-4750-8d39-53dff19bc328" (UID: "e2a23ba4-22d9-4750-8d39-53dff19bc328"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.360702 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e2a23ba4-22d9-4750-8d39-53dff19bc328" (UID: "e2a23ba4-22d9-4750-8d39-53dff19bc328"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.385667 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.388449 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.388576 4863 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.388671 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.388758 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a23ba4-22d9-4750-8d39-53dff19bc328-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.388839 4863 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e2a23ba4-22d9-4750-8d39-53dff19bc328-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.388909 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/753801f8-f439-415b-9674-08d58e53def8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.419573 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "cc4802ae-16f2-4b9e-a153-b48e9c8325b8" (UID: "cc4802ae-16f2-4b9e-a153-b48e9c8325b8"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.420040 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "cfff4892-c0b7-411f-9921-329db358dcde" (UID: "cfff4892-c0b7-411f-9921-329db358dcde"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.491295 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfff4892-c0b7-411f-9921-329db358dcde-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.491333 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4802ae-16f2-4b9e-a153-b48e9c8325b8-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: E1205 07:12:31.491405 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:31 crc kubenswrapper[4863]: E1205 07:12:31.491456 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data podName:c908ae8d-4ec2-4938-819c-0ba2ee26f209 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:35.491437656 +0000 UTC m=+1583.217434696 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data") pod "rabbitmq-cell1-server-0" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209") : configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.572098 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.610418 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:12:31 crc kubenswrapper[4863]: E1205 07:12:31.611010 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.655595 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.677705 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.705948 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-vencrypt-tls-certs\") pod \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.706410 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-ff9694557-gb857"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.706697 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-ff9694557-gb857" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-httpd" containerID="cri-o://f96538aa3751fc9603b9dde8527729c4d78aba237e3063756b0682a46b12cadd" gracePeriod=30 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 
07:12:31.707151 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-ff9694557-gb857" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-server" containerID="cri-o://ac2c5273ab207bce433edc60614777727533cb3043b62817be91b5f725b80074" gracePeriod=30 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.707611 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-combined-ca-bundle\") pod \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.708274 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-nova-novncproxy-tls-certs\") pod \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.708923 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8674\" (UniqueName: \"kubernetes.io/projected/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-kube-api-access-f8674\") pod \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.708990 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data\") pod \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.747281 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-kube-api-access-f8674" (OuterVolumeSpecName: "kube-api-access-f8674") pod "79d93942-6ec7-4fea-9e05-a9c831ad3dd3" (UID: "79d93942-6ec7-4fea-9e05-a9c831ad3dd3"). InnerVolumeSpecName "kube-api-access-f8674". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.768537 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement1771-account-delete-h4fq8"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.789373 4863 generic.go:334] "Generic (PLEG): container finished" podID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerID="cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b" exitCode=0 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.789453 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dcee685a-e1e9-4dd8-b04d-c5719c9bf771","Type":"ContainerDied","Data":"cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b"} Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.792381 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronadda-account-delete-wpdtm"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.797136 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-48nzq_cc4802ae-16f2-4b9e-a153-b48e9c8325b8/openstack-network-exporter/0.log" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.799291 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-48nzq" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.799516 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-48nzq" event={"ID":"cc4802ae-16f2-4b9e-a153-b48e9c8325b8","Type":"ContainerDied","Data":"c5b8f74cf44bcb9653dea6310e657b71201908f935875bbacff8ff6cf65865ee"} Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.799631 4863 scope.go:117] "RemoveContainer" containerID="95a3d02a39520c799ce763e3863b5a722c2d59557d6dc506840feb931175f0ff" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.803958 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79d93942-6ec7-4fea-9e05-a9c831ad3dd3" (UID: "79d93942-6ec7-4fea-9e05-a9c831ad3dd3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.811275 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.811301 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8674\" (UniqueName: \"kubernetes.io/projected/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-kube-api-access-f8674\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.844823 4863 generic.go:334] "Generic (PLEG): container finished" podID="79d93942-6ec7-4fea-9e05-a9c831ad3dd3" containerID="3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d" exitCode=0 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.844914 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"79d93942-6ec7-4fea-9e05-a9c831ad3dd3","Type":"ContainerDied","Data":"3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d"} Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.844942 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"79d93942-6ec7-4fea-9e05-a9c831ad3dd3","Type":"ContainerDied","Data":"14c9a0def12e2f96dde44e805f6c9e16de142125c7b61e9853d336ee312bd476"} Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.845002 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.887460 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "79d93942-6ec7-4fea-9e05-a9c831ad3dd3" (UID: "79d93942-6ec7-4fea-9e05-a9c831ad3dd3"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.917943 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data" (OuterVolumeSpecName: "config-data") pod "79d93942-6ec7-4fea-9e05-a9c831ad3dd3" (UID: "79d93942-6ec7-4fea-9e05-a9c831ad3dd3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.925819 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-48nzq"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.931060 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-48nzq"] Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.937922 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data\") pod \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\" (UID: \"79d93942-6ec7-4fea-9e05-a9c831ad3dd3\") " Dec 05 07:12:31 crc kubenswrapper[4863]: W1205 07:12:31.938053 4863 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/79d93942-6ec7-4fea-9e05-a9c831ad3dd3/volumes/kubernetes.io~secret/config-data Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.938076 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data" (OuterVolumeSpecName: "config-data") pod "79d93942-6ec7-4fea-9e05-a9c831ad3dd3" (UID: "79d93942-6ec7-4fea-9e05-a9c831ad3dd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.940262 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.940292 4863 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.964756 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164" exitCode=0 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.964787 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7" exitCode=0 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.964818 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3" exitCode=0 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.964815 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164"} Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.964858 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7"} Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.964868 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3"} Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.987648 4863 generic.go:334] "Generic (PLEG): container finished" podID="fd957476-007c-4882-8449-96deebe6a63c" containerID="c82e5e7750a9dba29e1d0474f6d5fc28daeb3e913391aab3c135eb318d4d0076" exitCode=0 Dec 05 07:12:31 crc kubenswrapper[4863]: I1205 07:12:31.988037 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fd957476-007c-4882-8449-96deebe6a63c","Type":"ContainerDied","Data":"c82e5e7750a9dba29e1d0474f6d5fc28daeb3e913391aab3c135eb318d4d0076"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.005903 4863 generic.go:334] "Generic (PLEG): container finished" podID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerID="61e8197d30bc11087e459d5b563b08a28f784f5a6f60b721e93bb7a88955158e" exitCode=143 Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.005975 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c","Type":"ContainerDied","Data":"61e8197d30bc11087e459d5b563b08a28f784f5a6f60b721e93bb7a88955158e"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.007612 4863 scope.go:117] "RemoveContainer" containerID="3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.011423 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderd676-account-delete-7ks4z" event={"ID":"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1","Type":"ContainerStarted","Data":"a068483e959f02649f7b1e04ebce07bb70283c18af4505e5671b518d62456e55"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.011460 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderd676-account-delete-7ks4z" event={"ID":"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1","Type":"ContainerStarted","Data":"56af4b56fba7f0f76551dcdec90ee833fa473dbff47da617d0e51b0416687089"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.040989 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.046210 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "79d93942-6ec7-4fea-9e05-a9c831ad3dd3" (UID: "79d93942-6ec7-4fea-9e05-a9c831ad3dd3"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.049483 4863 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/79d93942-6ec7-4fea-9e05-a9c831ad3dd3-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.049549 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.049989 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data podName:46586650-4568-4f5e-9854-30f6e0291b6b nodeName:}" failed. 
No retries permitted until 2025-12-05 07:12:36.049970072 +0000 UTC m=+1583.775967102 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data") pod "rabbitmq-server-0" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b") : configmap "rabbitmq-config-data" not found Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.053484 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinderd676-account-delete-7ks4z" podStartSLOduration=5.053459606 podStartE2EDuration="5.053459606s" podCreationTimestamp="2025-12-05 07:12:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:12:32.046897678 +0000 UTC m=+1579.772894718" watchObservedRunningTime="2025-12-05 07:12:32.053459606 +0000 UTC m=+1579.779456646" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.069154 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement1771-account-delete-h4fq8" event={"ID":"afae6292-c1df-4dd3-abec-d1f493c03857","Type":"ContainerStarted","Data":"b845b978cf5b213dcc4db68e78e700da10c02ad74a2cd85998ebd530a99ed44c"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.076797 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.083807 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glanceddac-account-delete-hlrff"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.088009 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cfff4892-c0b7-411f-9921-329db358dcde/ovsdbserver-sb/0.log" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.088073 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfff4892-c0b7-411f-9921-329db358dcde","Type":"ContainerDied","Data":"344e6fed287e36d5867f5261bf0564a6b4c473cc2e0036d5e842713f9202a44c"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.088135 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.095208 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican6b9e-account-delete-7nrpl"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.104994 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" event={"ID":"753801f8-f439-415b-9674-08d58e53def8","Type":"ContainerDied","Data":"b4ec874fd7e56578c26a7b3813ef3559afdf5dfdefe30b335a2bb7e7c2f7939c"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.105121 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf4ff87b5-5kjhr" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.125142 4863 scope.go:117] "RemoveContainer" containerID="3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d" Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.128847 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d\": container with ID starting with 3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d not found: ID does not exist" containerID="3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.128914 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d"} err="failed to get container status \"3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d\": rpc error: code = NotFound desc = could not find container \"3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d\": container with ID starting with 3322f8d1c4403aa69b73aa302453184bce8523e3d716b7b39fe963d43b571f9d not found: ID does not exist" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.128990 4863 scope.go:117] "RemoveContainer" containerID="c2aaeca34f1d08b6ba77903d3594d5cd3cc71ad58a3b1e953f6d010118039c7d" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.152824 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxkth\" (UniqueName: \"kubernetes.io/projected/fd957476-007c-4882-8449-96deebe6a63c-kube-api-access-gxkth\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.153278 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-combined-ca-bundle\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.153391 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-kolla-config\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.153547 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-config-data-default\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.153659 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fd957476-007c-4882-8449-96deebe6a63c-config-data-generated\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.153864 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-operator-scripts\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.153988 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.154112 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-galera-tls-certs\") pod \"fd957476-007c-4882-8449-96deebe6a63c\" (UID: \"fd957476-007c-4882-8449-96deebe6a63c\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.155720 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.156221 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd957476-007c-4882-8449-96deebe6a63c-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.156952 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.159401 4863 generic.go:334] "Generic (PLEG): container finished" podID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" exitCode=0 Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.159691 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nsmzq" event={"ID":"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3","Type":"ContainerDied","Data":"4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150"} Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.160047 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapia811-account-delete-9vntl"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.160733 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.163416 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd957476-007c-4882-8449-96deebe6a63c-kube-api-access-gxkth" (OuterVolumeSpecName: "kube-api-access-gxkth") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "kube-api-access-gxkth". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.173782 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronadda-account-delete-wpdtm" event={"ID":"b5674946-023d-45c0-a0bf-373aa5d7ee65","Type":"ContainerStarted","Data":"21ca6ff77da49d569fe51a361bbf07624043c771f66bff6340425a9b7ddac3f9"} Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.196593 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.196700 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: W1205 07:12:32.196817 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9d1ac32_bc45_41a0_b696_034ff92b13d4.slice/crio-dcc817d82a320bf2049d52cd9804c52e097f76d17d0c31a7c470eb715621ff96 WatchSource:0}: Error finding container dcc817d82a320bf2049d52cd9804c52e097f76d17d0c31a7c470eb715621ff96: Status 404 returned error can't find the container with id dcc817d82a320bf2049d52cd9804c52e097f76d17d0c31a7c470eb715621ff96 Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.206416 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.207343 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.207419 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" probeType="Readiness" 
pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.212034 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: W1205 07:12:32.226777 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81284a21_5f4d_4135_b08e_94415569eb09.slice/crio-1b171bb6a3fe720e2deaa231c4bcf99892feafb2b9ae09fcb7bbeabaaad7bc07 WatchSource:0}: Error finding container 1b171bb6a3fe720e2deaa231c4bcf99892feafb2b9ae09fcb7bbeabaaad7bc07: Status 404 returned error can't find the container with id 1b171bb6a3fe720e2deaa231c4bcf99892feafb2b9ae09fcb7bbeabaaad7bc07 Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.233975 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.265310 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fd957476-007c-4882-8449-96deebe6a63c-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.265345 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.265540 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.265592 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.280576 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.280645 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxkth\" (UniqueName: \"kubernetes.io/projected/fd957476-007c-4882-8449-96deebe6a63c-kube-api-access-gxkth\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.280660 4863 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.280672 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fd957476-007c-4882-8449-96deebe6a63c-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.311188 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.311223 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd957476-007c-4882-8449-96deebe6a63c" (UID: "fd957476-007c-4882-8449-96deebe6a63c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.327904 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0ed59-account-delete-v9rg9"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.347171 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.355395 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-lvrb5" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" containerName="ovn-controller" probeResult="failure" output="" Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.358179 4863 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 05 07:12:32 crc kubenswrapper[4863]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-05T07:12:30Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 05 07:12:32 crc kubenswrapper[4863]: /etc/init.d/functions: line 589: 442 Alarm clock "$@" Dec 05 07:12:32 crc kubenswrapper[4863]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-lvrb5" message=< Dec 05 07:12:32 crc kubenswrapper[4863]: Exiting ovn-controller (1) [FAILED] Dec 05 07:12:32 crc kubenswrapper[4863]: Killing ovn-controller (1) [ OK ] Dec 05 07:12:32 crc kubenswrapper[4863]: 2025-12-05T07:12:30Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 05 07:12:32 crc kubenswrapper[4863]: /etc/init.d/functions: line 589: 442 Alarm clock "$@" Dec 05 07:12:32 crc kubenswrapper[4863]: > Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.358207 4863 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 05 07:12:32 crc kubenswrapper[4863]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-05T07:12:30Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 05 07:12:32 crc kubenswrapper[4863]: /etc/init.d/functions: line 589: 442 Alarm clock "$@" Dec 05 07:12:32 crc kubenswrapper[4863]: > pod="openstack/ovn-controller-lvrb5" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" 
containerName="ovn-controller" containerID="cri-o://8bb9f0e24beab12e2880dde6ea3b6faf8156efe7d58338a5839bbe812a11bdfe" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.358231 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-lvrb5" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" containerName="ovn-controller" containerID="cri-o://8bb9f0e24beab12e2880dde6ea3b6faf8156efe7d58338a5839bbe812a11bdfe" gracePeriod=27 Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.386047 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.386081 4863 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.386093 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd957476-007c-4882-8449-96deebe6a63c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.459257 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3201b201_3f68_4c2e_858c_56f0c8242c68.slice/crio-conmon-f96538aa3751fc9603b9dde8527729c4d78aba237e3063756b0682a46b12cadd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc4802ae_16f2_4b9e_a153_b48e9c8325b8.slice/crio-c5b8f74cf44bcb9653dea6310e657b71201908f935875bbacff8ff6cf65865ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31a1b67b_8afd_4b9a_bd8d_48e183dbb6a1.slice/crio-conmon-a068483e959f02649f7b1e04ebce07bb70283c18af4505e5671b518d62456e55.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc4802ae_16f2_4b9e_a153_b48e9c8325b8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3201b201_3f68_4c2e_858c_56f0c8242c68.slice/crio-f96538aa3751fc9603b9dde8527729c4d78aba237e3063756b0682a46b12cadd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddcee685a_e1e9_4dd8_b04d_c5719c9bf771.slice/crio-95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e.scope\": RecentStats: unable to find data in memory cache]" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.512665 4863 scope.go:117] "RemoveContainer" containerID="03c61e5fe864e52cfdadb9c8d6acf54051414a60811bc92e6c566db8db33dd09" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.574736 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf4ff87b5-5kjhr"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.599612 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf4ff87b5-5kjhr"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.622247 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" 
path="/var/lib/kubelet/pods/132b3c8e-e25f-44ee-9d67-eccb0c2f8f91/volumes" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.623168 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="753801f8-f439-415b-9674-08d58e53def8" path="/var/lib/kubelet/pods/753801f8-f439-415b-9674-08d58e53def8/volumes" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.623809 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc4802ae-16f2-4b9e-a153-b48e9c8325b8" path="/var/lib/kubelet/pods/cc4802ae-16f2-4b9e-a153-b48e9c8325b8/volumes" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.625614 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a23ba4-22d9-4750-8d39-53dff19bc328" path="/var/lib/kubelet/pods/e2a23ba4-22d9-4750-8d39-53dff19bc328/volumes" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.633045 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.633077 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.643899 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.645002 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.648869 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.650451 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 07:12:32 crc kubenswrapper[4863]: E1205 07:12:32.650498 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="ovn-northd" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.667915 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.689952 4863 scope.go:117] "RemoveContainer" containerID="5859f9a814df63156a5f73e63f537579a8e080d8ff53756995610a17d058be68" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.840042 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896023 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-scripts\") pod \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896119 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5mrc\" (UniqueName: \"kubernetes.io/projected/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-kube-api-access-t5mrc\") pod \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896147 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-etc-machine-id\") pod \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896184 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-combined-ca-bundle\") pod \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896216 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data\") pod \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896299 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data-custom\") pod \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\" (UID: \"dcee685a-e1e9-4dd8-b04d-c5719c9bf771\") " Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896540 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dcee685a-e1e9-4dd8-b04d-c5719c9bf771" (UID: "dcee685a-e1e9-4dd8-b04d-c5719c9bf771"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.896990 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.913262 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dcee685a-e1e9-4dd8-b04d-c5719c9bf771" (UID: "dcee685a-e1e9-4dd8-b04d-c5719c9bf771"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.913379 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-scripts" (OuterVolumeSpecName: "scripts") pod "dcee685a-e1e9-4dd8-b04d-c5719c9bf771" (UID: "dcee685a-e1e9-4dd8-b04d-c5719c9bf771"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.935830 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-kube-api-access-t5mrc" (OuterVolumeSpecName: "kube-api-access-t5mrc") pod "dcee685a-e1e9-4dd8-b04d-c5719c9bf771" (UID: "dcee685a-e1e9-4dd8-b04d-c5719c9bf771"). InnerVolumeSpecName "kube-api-access-t5mrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.998870 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.998908 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5mrc\" (UniqueName: \"kubernetes.io/projected/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-kube-api-access-t5mrc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:32 crc kubenswrapper[4863]: I1205 07:12:32.998922 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.044099 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.173:8776/healthcheck\": read tcp 10.217.0.2:59654->10.217.0.173:8776: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.080754 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.101413 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.110611 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.110677 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/nova-scheduler-0" podUID="d847c56f-38ef-4aaf-a974-b347f5091038" containerName="nova-scheduler-scheduler" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.183723 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.176:9292/healthcheck\": read tcp 10.217.0.2:35832->10.217.0.176:9292: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.183735 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.176:9292/healthcheck\": read tcp 10.217.0.2:35842->10.217.0.176:9292: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.198772 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dcee685a-e1e9-4dd8-b04d-c5719c9bf771" (UID: "dcee685a-e1e9-4dd8-b04d-c5719c9bf771"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.205168 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.289978 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lvrb5_da88ee86-9914-4396-bb33-d00d24b00c59/ovn-controller/0.log" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.290021 4863 generic.go:334] "Generic (PLEG): container finished" podID="da88ee86-9914-4396-bb33-d00d24b00c59" containerID="8bb9f0e24beab12e2880dde6ea3b6faf8156efe7d58338a5839bbe812a11bdfe" exitCode=143 Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.290090 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lvrb5" event={"ID":"da88ee86-9914-4396-bb33-d00d24b00c59","Type":"ContainerDied","Data":"8bb9f0e24beab12e2880dde6ea3b6faf8156efe7d58338a5839bbe812a11bdfe"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.300137 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data" (OuterVolumeSpecName: "config-data") pod "dcee685a-e1e9-4dd8-b04d-c5719c9bf771" (UID: "dcee685a-e1e9-4dd8-b04d-c5719c9bf771"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.319783 4863 generic.go:334] "Generic (PLEG): container finished" podID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerID="ac2c5273ab207bce433edc60614777727533cb3043b62817be91b5f725b80074" exitCode=0 Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.319821 4863 generic.go:334] "Generic (PLEG): container finished" podID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerID="f96538aa3751fc9603b9dde8527729c4d78aba237e3063756b0682a46b12cadd" exitCode=0 Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.319895 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-ff9694557-gb857" event={"ID":"3201b201-3f68-4c2e-858c-56f0c8242c68","Type":"ContainerDied","Data":"ac2c5273ab207bce433edc60614777727533cb3043b62817be91b5f725b80074"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.319926 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-ff9694557-gb857" event={"ID":"3201b201-3f68-4c2e-858c-56f0c8242c68","Type":"ContainerDied","Data":"f96538aa3751fc9603b9dde8527729c4d78aba237e3063756b0682a46b12cadd"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.326631 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcee685a-e1e9-4dd8-b04d-c5719c9bf771-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.341649 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0ed59-account-delete-v9rg9" event={"ID":"e15a3f00-fce6-490e-9b6b-ca28d8334d25","Type":"ContainerStarted","Data":"4e3e6cba36f5f2ba67fc6d9696a91b34a32f6623e6f389254f47e7679895c09c"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.347207 4863 generic.go:334] "Generic (PLEG): container finished" podID="31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1" containerID="a068483e959f02649f7b1e04ebce07bb70283c18af4505e5671b518d62456e55" exitCode=0 Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.347268 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderd676-account-delete-7ks4z" event={"ID":"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1","Type":"ContainerDied","Data":"a068483e959f02649f7b1e04ebce07bb70283c18af4505e5671b518d62456e55"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.350236 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fd957476-007c-4882-8449-96deebe6a63c","Type":"ContainerDied","Data":"9ea807b88a5bd50ab5eda3e4c9aeb167646aff425c65cb4f49602a086a83544f"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.350274 4863 scope.go:117] "RemoveContainer" containerID="c82e5e7750a9dba29e1d0474f6d5fc28daeb3e913391aab3c135eb318d4d0076" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.350385 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.396643 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6b9e-account-delete-7nrpl" event={"ID":"7b21df5e-065a-4c62-b271-704c86b97f58","Type":"ContainerStarted","Data":"05cf9d310f6f5930534f91e8433cfadec4cac29f103b9bff690bd665203c5d9f"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.396902 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6b9e-account-delete-7nrpl" event={"ID":"7b21df5e-065a-4c62-b271-704c86b97f58","Type":"ContainerStarted","Data":"2158e4fa57c0b26d966997da3d6c0b65ac62c2da4f2fd0ba488e1559cc7c76c0"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.425342 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.177:9292/healthcheck\": read tcp 10.217.0.2:41380->10.217.0.177:9292: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.425537 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.177:9292/healthcheck\": read tcp 10.217.0.2:41366->10.217.0.177:9292: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.453993 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": read tcp 10.217.0.2:34474->10.217.0.202:8775: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.454299 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": read tcp 10.217.0.2:34484->10.217.0.202:8775: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.469107 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-776db75b76-jmjll" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:55450->10.217.0.160:9311: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.469317 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-776db75b76-jmjll" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:55462->10.217.0.160:9311: read: connection reset by peer" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.487580 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican6b9e-account-delete-7nrpl" podStartSLOduration=5.487555519 podStartE2EDuration="5.487555519s" podCreationTimestamp="2025-12-05 07:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:12:33.439269978 +0000 UTC m=+1581.165267018" 
watchObservedRunningTime="2025-12-05 07:12:33.487555519 +0000 UTC m=+1581.213552559" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.489782 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement1771-account-delete-h4fq8" event={"ID":"afae6292-c1df-4dd3-abec-d1f493c03857","Type":"ContainerStarted","Data":"df76ce8ae2a6545ac0864b16219ff2541392407a17b3828ce1a57cca34eea46d"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.508583 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceddac-account-delete-hlrff" event={"ID":"d9d1ac32-bc45-41a0-b696-034ff92b13d4","Type":"ContainerStarted","Data":"f2662da293cfbe287c207bdc56fe94abd416d491d01023463a804b9131cd7e2e"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.508626 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceddac-account-delete-hlrff" event={"ID":"d9d1ac32-bc45-41a0-b696-034ff92b13d4","Type":"ContainerStarted","Data":"dcc817d82a320bf2049d52cd9804c52e097f76d17d0c31a7c470eb715621ff96"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.513005 4863 generic.go:334] "Generic (PLEG): container finished" podID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerID="95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e" exitCode=0 Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.513061 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dcee685a-e1e9-4dd8-b04d-c5719c9bf771","Type":"ContainerDied","Data":"95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.513086 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dcee685a-e1e9-4dd8-b04d-c5719c9bf771","Type":"ContainerDied","Data":"cb2f597865964174fd799611b005b9e59ae5b272be4de78fc68244bdc47c2ccd"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.513132 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.516581 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronadda-account-delete-wpdtm" event={"ID":"b5674946-023d-45c0-a0bf-373aa5d7ee65","Type":"ContainerStarted","Data":"f36ebe2df557c66ab7206b7a13a758e1a097e24e728e0632ac580409fc3c1b99"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.520233 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement1771-account-delete-h4fq8" podStartSLOduration=5.520215101 podStartE2EDuration="5.520215101s" podCreationTimestamp="2025-12-05 07:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:12:33.512138085 +0000 UTC m=+1581.238135125" watchObservedRunningTime="2025-12-05 07:12:33.520215101 +0000 UTC m=+1581.246212141" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.522457 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapia811-account-delete-9vntl" event={"ID":"81284a21-5f4d-4135-b08e-94415569eb09","Type":"ContainerStarted","Data":"ab248ea2b3fb7716d112afdff0208e09af5de52fa6ae81ec6d34fd454fabcb0c"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.522562 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapia811-account-delete-9vntl" event={"ID":"81284a21-5f4d-4135-b08e-94415569eb09","Type":"ContainerStarted","Data":"1b171bb6a3fe720e2deaa231c4bcf99892feafb2b9ae09fcb7bbeabaaad7bc07"} Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.543020 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glanceddac-account-delete-hlrff" podStartSLOduration=5.542998603 podStartE2EDuration="5.542998603s" podCreationTimestamp="2025-12-05 07:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:12:33.525854698 +0000 UTC m=+1581.251851738" watchObservedRunningTime="2025-12-05 07:12:33.542998603 +0000 UTC m=+1581.268995633" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.593580 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutronadda-account-delete-wpdtm" podStartSLOduration=5.593559469 podStartE2EDuration="5.593559469s" podCreationTimestamp="2025-12-05 07:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:12:33.554323258 +0000 UTC m=+1581.280320298" watchObservedRunningTime="2025-12-05 07:12:33.593559469 +0000 UTC m=+1581.319556509" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.607050 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapia811-account-delete-9vntl" podStartSLOduration=5.607028777 podStartE2EDuration="5.607028777s" podCreationTimestamp="2025-12-05 07:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:12:33.574319383 +0000 UTC m=+1581.300316443" watchObservedRunningTime="2025-12-05 07:12:33.607028777 +0000 UTC m=+1581.333025817" Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.721744 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , 
stderr: , exit code -1" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.751622 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.753110 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:33 crc kubenswrapper[4863]: E1205 07:12:33.753155 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="ef02ff71-0212-4b81-8243-18e2d28b828e" containerName="nova-cell0-conductor-conductor" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.771645 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lvrb5_da88ee86-9914-4396-bb33-d00d24b00c59/ovn-controller/0.log" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.771725 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lvrb5" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.791795 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847082 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-log-ovn\") pod \"da88ee86-9914-4396-bb33-d00d24b00c59\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847147 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn4w2\" (UniqueName: \"kubernetes.io/projected/da88ee86-9914-4396-bb33-d00d24b00c59-kube-api-access-cn4w2\") pod \"da88ee86-9914-4396-bb33-d00d24b00c59\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847232 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da88ee86-9914-4396-bb33-d00d24b00c59-scripts\") pod \"da88ee86-9914-4396-bb33-d00d24b00c59\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847275 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-ovn-controller-tls-certs\") pod \"da88ee86-9914-4396-bb33-d00d24b00c59\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847327 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run\") pod \"da88ee86-9914-4396-bb33-d00d24b00c59\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847456 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run" (OuterVolumeSpecName: "var-run") pod "da88ee86-9914-4396-bb33-d00d24b00c59" (UID: "da88ee86-9914-4396-bb33-d00d24b00c59"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847504 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "da88ee86-9914-4396-bb33-d00d24b00c59" (UID: "da88ee86-9914-4396-bb33-d00d24b00c59"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847515 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-combined-ca-bundle\") pod \"da88ee86-9914-4396-bb33-d00d24b00c59\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.847660 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run-ovn\") pod \"da88ee86-9914-4396-bb33-d00d24b00c59\" (UID: \"da88ee86-9914-4396-bb33-d00d24b00c59\") " Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.848776 4863 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.851228 4863 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.850330 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "da88ee86-9914-4396-bb33-d00d24b00c59" (UID: "da88ee86-9914-4396-bb33-d00d24b00c59"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.857266 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da88ee86-9914-4396-bb33-d00d24b00c59-scripts" (OuterVolumeSpecName: "scripts") pod "da88ee86-9914-4396-bb33-d00d24b00c59" (UID: "da88ee86-9914-4396-bb33-d00d24b00c59"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:33 crc kubenswrapper[4863]: I1205 07:12:33.857626 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.869710 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da88ee86-9914-4396-bb33-d00d24b00c59-kube-api-access-cn4w2" (OuterVolumeSpecName: "kube-api-access-cn4w2") pod "da88ee86-9914-4396-bb33-d00d24b00c59" (UID: "da88ee86-9914-4396-bb33-d00d24b00c59"). InnerVolumeSpecName "kube-api-access-cn4w2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.888326 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.924760 4863 scope.go:117] "RemoveContainer" containerID="98cd333e1c0d37adf112b6793afb04bdd8345ff143975ea68727af0c80a0fe6a" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.925568 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.933732 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "da88ee86-9914-4396-bb33-d00d24b00c59" (UID: "da88ee86-9914-4396-bb33-d00d24b00c59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.956285 4863 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/da88ee86-9914-4396-bb33-d00d24b00c59-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.956313 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn4w2\" (UniqueName: \"kubernetes.io/projected/da88ee86-9914-4396-bb33-d00d24b00c59-kube-api-access-cn4w2\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.956324 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da88ee86-9914-4396-bb33-d00d24b00c59-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.956333 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.968181 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "da88ee86-9914-4396-bb33-d00d24b00c59" (UID: "da88ee86-9914-4396-bb33-d00d24b00c59"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:33.992605 4863 scope.go:117] "RemoveContainer" containerID="cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.001102 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.024854 4863 scope.go:117] "RemoveContainer" containerID="95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.055781 4863 scope.go:117] "RemoveContainer" containerID="cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.058183 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b\": container with ID starting with cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b not found: ID does not exist" containerID="cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.058223 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b"} err="failed to get container status \"cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b\": rpc error: code = NotFound desc = could not find container \"cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b\": container with ID starting with cf4b0c93c1552059011d5eb28fc5bac4691ec629eb2f557e8d6dbe832a07357b not found: ID does not exist" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.058247 4863 scope.go:117] "RemoveContainer" containerID="95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.058691 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e\": container with ID starting with 95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e not found: ID does not exist" containerID="95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.058708 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e"} err="failed to get container status \"95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e\": rpc error: code = NotFound desc = could not find container \"95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e\": container with ID starting with 95719aae578663a279263a6c86488976d7508c0a1e50038c8be2c98af862793e not found: ID does not exist" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059160 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data-custom\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059246 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-internal-tls-certs\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059278 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-89hmm\" (UniqueName: \"kubernetes.io/projected/1a46c1ea-72b2-4dfd-a073-72f82617ce76-kube-api-access-89hmm\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059303 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-scripts\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059322 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a46c1ea-72b2-4dfd-a073-72f82617ce76-logs\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059351 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a46c1ea-72b2-4dfd-a073-72f82617ce76-etc-machine-id\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059443 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-public-tls-certs\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059625 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.059673 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-combined-ca-bundle\") pod \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\" (UID: \"1a46c1ea-72b2-4dfd-a073-72f82617ce76\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.060057 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/da88ee86-9914-4396-bb33-d00d24b00c59-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.061587 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a46c1ea-72b2-4dfd-a073-72f82617ce76-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.061920 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a46c1ea-72b2-4dfd-a073-72f82617ce76-logs" (OuterVolumeSpecName: "logs") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.065489 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a46c1ea-72b2-4dfd-a073-72f82617ce76-kube-api-access-89hmm" (OuterVolumeSpecName: "kube-api-access-89hmm") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "kube-api-access-89hmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.065882 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-scripts" (OuterVolumeSpecName: "scripts") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.077690 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.100661 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.143987 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161417 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data" (OuterVolumeSpecName: "config-data") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161846 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161867 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161876 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161884 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161892 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89hmm\" (UniqueName: \"kubernetes.io/projected/1a46c1ea-72b2-4dfd-a073-72f82617ce76-kube-api-access-89hmm\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161900 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161909 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a46c1ea-72b2-4dfd-a073-72f82617ce76-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.161918 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a46c1ea-72b2-4dfd-a073-72f82617ce76-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.163382 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1a46c1ea-72b2-4dfd-a073-72f82617ce76" (UID: "1a46c1ea-72b2-4dfd-a073-72f82617ce76"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.261003 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758 is running failed: container process not found" containerID="5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.261490 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758 is running failed: container process not found" containerID="5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.261752 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758 is running failed: container process not found" containerID="5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.261788 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" containerName="nova-cell1-conductor-conductor" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.263034 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a46c1ea-72b2-4dfd-a073-72f82617ce76-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.272769 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.279051 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.363730 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckhg5\" (UniqueName: \"kubernetes.io/projected/046f9d47-6b50-473f-838f-8375b6fe6389-kube-api-access-ckhg5\") pod \"046f9d47-6b50-473f-838f-8375b6fe6389\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.363781 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-combined-ca-bundle\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.363833 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-etc-swift\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.363909 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/046f9d47-6b50-473f-838f-8375b6fe6389-logs\") pod \"046f9d47-6b50-473f-838f-8375b6fe6389\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.363927 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-internal-tls-certs\") pod \"046f9d47-6b50-473f-838f-8375b6fe6389\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.363971 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-config-data\") pod \"046f9d47-6b50-473f-838f-8375b6fe6389\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.363987 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-config-data\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.364015 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-public-tls-certs\") pod \"046f9d47-6b50-473f-838f-8375b6fe6389\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.364592 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/046f9d47-6b50-473f-838f-8375b6fe6389-logs" (OuterVolumeSpecName: "logs") pod "046f9d47-6b50-473f-838f-8375b6fe6389" (UID: "046f9d47-6b50-473f-838f-8375b6fe6389"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.364967 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbhbb\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-kube-api-access-xbhbb\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.364984 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-log-httpd\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.365021 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-scripts\") pod \"046f9d47-6b50-473f-838f-8375b6fe6389\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.365044 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-internal-tls-certs\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.365154 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-combined-ca-bundle\") pod \"046f9d47-6b50-473f-838f-8375b6fe6389\" (UID: \"046f9d47-6b50-473f-838f-8375b6fe6389\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.365171 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-public-tls-certs\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.365195 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-run-httpd\") pod \"3201b201-3f68-4c2e-858c-56f0c8242c68\" (UID: \"3201b201-3f68-4c2e-858c-56f0c8242c68\") " Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.368700 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-scripts" (OuterVolumeSpecName: "scripts") pod "046f9d47-6b50-473f-838f-8375b6fe6389" (UID: "046f9d47-6b50-473f-838f-8375b6fe6389"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.372225 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/046f9d47-6b50-473f-838f-8375b6fe6389-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.372250 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.372678 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.375243 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-kube-api-access-xbhbb" (OuterVolumeSpecName: "kube-api-access-xbhbb") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "kube-api-access-xbhbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.375577 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.391362 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.394331 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/046f9d47-6b50-473f-838f-8375b6fe6389-kube-api-access-ckhg5" (OuterVolumeSpecName: "kube-api-access-ckhg5") pod "046f9d47-6b50-473f-838f-8375b6fe6389" (UID: "046f9d47-6b50-473f-838f-8375b6fe6389"). InnerVolumeSpecName "kube-api-access-ckhg5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.417497 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.417826 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-central-agent" containerID="cri-o://faa93203b4e74ae18536fcc22dae3d5e667b4f4cb6230251e992eb4cbb666258" gracePeriod=30 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.417875 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="proxy-httpd" containerID="cri-o://7550e24c5734f210c7db5088ea7f7de89cf17da52278a4790f7d7af94780b9ee" gracePeriod=30 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.417916 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="sg-core" containerID="cri-o://317ef2eee14e624241ec945a7fdc4c1afe943f1965f78feb5dc586e82047769c" gracePeriod=30 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.417940 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-notification-agent" containerID="cri-o://8a42fab69785176997b7e2fb38ed78927e9b906b9e9eb6e7ad136bfb756c4118" gracePeriod=30 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.476244 4863 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.476278 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbhbb\" (UniqueName: \"kubernetes.io/projected/3201b201-3f68-4c2e-858c-56f0c8242c68-kube-api-access-xbhbb\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.476290 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.476299 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3201b201-3f68-4c2e-858c-56f0c8242c68-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.476310 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckhg5\" (UniqueName: \"kubernetes.io/projected/046f9d47-6b50-473f-838f-8375b6fe6389-kube-api-access-ckhg5\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.476813 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.477029 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="0765ebea-20ed-4ada-8031-3871a35e5f11" containerName="kube-state-metrics" containerID="cri-o://f6d81c1a4f657c6fae55a4c625c5b5be2de6ad49debaa852a7501310e581a784" gracePeriod=30 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.563193 4863 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/memcached-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.579951 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="d4703140-cc56-4eb8-b06b-1033916a839f" containerName="memcached" containerID="cri-o://d4bdd29e9c370627b90ad1916b3bd8db2227c44957a9ebe1de4ac4bdd4fdd598" gracePeriod=30 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.586048 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.615929 4863 generic.go:334] "Generic (PLEG): container finished" podID="046f9d47-6b50-473f-838f-8375b6fe6389" containerID="ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.616074 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-54d5bbb8cd-6z8nd" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.635491 4863 generic.go:334] "Generic (PLEG): container finished" podID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerID="45f9964a635141593c0ecd0b472b9d0197658218a0e29d30f979c3117ddf5090" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.640976 4863 generic.go:334] "Generic (PLEG): container finished" podID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerID="9ee5b5095f60b3fcf7b47a34050adc8fc3929d01b34264ecd58fb01bc25bd387" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.642079 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79d93942-6ec7-4fea-9e05-a9c831ad3dd3" path="/var/lib/kubelet/pods/79d93942-6ec7-4fea-9e05-a9c831ad3dd3/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.642906 4863 generic.go:334] "Generic (PLEG): container finished" podID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerID="021d8eac52f783b7221dafaa63c90c000b5e8040770750c9ddb21dc8b652ebeb" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.643567 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfff4892-c0b7-411f-9921-329db358dcde" path="/var/lib/kubelet/pods/cfff4892-c0b7-411f-9921-329db358dcde/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.644583 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" path="/var/lib/kubelet/pods/dcee685a-e1e9-4dd8-b04d-c5719c9bf771/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.645698 4863 generic.go:334] "Generic (PLEG): container finished" podID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerID="1fb7878f5faa41f5bfe6c62080b32d439375f5649baed85652cf33bd1cdfde23" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.645725 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-config-data" (OuterVolumeSpecName: "config-data") pod "046f9d47-6b50-473f-838f-8375b6fe6389" (UID: "046f9d47-6b50-473f-838f-8375b6fe6389"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.646150 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd957476-007c-4882-8449-96deebe6a63c" path="/var/lib/kubelet/pods/fd957476-007c-4882-8449-96deebe6a63c/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.651748 4863 generic.go:334] "Generic (PLEG): container finished" podID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerID="8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.651924 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.659563 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell0ed59-account-delete-v9rg9" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.660337 4863 generic.go:334] "Generic (PLEG): container finished" podID="afae6292-c1df-4dd3-abec-d1f493c03857" containerID="df76ce8ae2a6545ac0864b16219ff2541392407a17b3828ce1a57cca34eea46d" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661756 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-tjj7d"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661781 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-54d5bbb8cd-6z8nd" event={"ID":"046f9d47-6b50-473f-838f-8375b6fe6389","Type":"ContainerDied","Data":"ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661808 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-54d5bbb8cd-6z8nd" event={"ID":"046f9d47-6b50-473f-838f-8375b6fe6389","Type":"ContainerDied","Data":"f90b9fe12eca7bbc176403ccf5bdcfcee30006ee7dd004d1993f14e7fd82d51e"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661824 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c1e2892-03e9-4f09-84ce-0c91842108cc","Type":"ContainerDied","Data":"45f9964a635141593c0ecd0b472b9d0197658218a0e29d30f979c3117ddf5090"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661840 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-776db75b76-jmjll" event={"ID":"92c796ad-a73e-4924-a59f-05031fcbb9d0","Type":"ContainerDied","Data":"9ee5b5095f60b3fcf7b47a34050adc8fc3929d01b34264ecd58fb01bc25bd387"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661855 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c","Type":"ContainerDied","Data":"021d8eac52f783b7221dafaa63c90c000b5e8040770750c9ddb21dc8b652ebeb"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661870 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0752f5c6-12cf-4208-b523-f970b63f1b4b","Type":"ContainerDied","Data":"1fb7878f5faa41f5bfe6c62080b32d439375f5649baed85652cf33bd1cdfde23"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661885 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"1a46c1ea-72b2-4dfd-a073-72f82617ce76","Type":"ContainerDied","Data":"8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661900 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a46c1ea-72b2-4dfd-a073-72f82617ce76","Type":"ContainerDied","Data":"549aae67303b6a18242583dd284cfc9f5114833b70853f3457867a22770c53bd"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661911 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0ed59-account-delete-v9rg9" event={"ID":"e15a3f00-fce6-490e-9b6b-ca28d8334d25","Type":"ContainerStarted","Data":"df94c2a716260a592e8e22b22317014b4322eb33fcee0a2adf82109c7651f931"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.661925 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement1771-account-delete-h4fq8" event={"ID":"afae6292-c1df-4dd3-abec-d1f493c03857","Type":"ContainerDied","Data":"df76ce8ae2a6545ac0864b16219ff2541392407a17b3828ce1a57cca34eea46d"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.662225 4863 scope.go:117] "RemoveContainer" containerID="ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.666812 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dlpwd"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.667333 4863 generic.go:334] "Generic (PLEG): container finished" podID="90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" containerID="5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.667374 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2","Type":"ContainerDied","Data":"5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.667706 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "046f9d47-6b50-473f-838f-8375b6fe6389" (UID: "046f9d47-6b50-473f-838f-8375b6fe6389"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.671685 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-config-data" (OuterVolumeSpecName: "config-data") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.678801 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-tjj7d"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.679387 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "046f9d47-6b50-473f-838f-8375b6fe6389" (UID: "046f9d47-6b50-473f-838f-8375b6fe6389"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.679963 4863 generic.go:334] "Generic (PLEG): container finished" podID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerID="2f4a46d4b57df2d6571ee3d4e278638e04cc4cea822ade66d2b1300772c45f1c" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.680035 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3409451f-c36d-4577-8720-89f4b6dd5ec5","Type":"ContainerDied","Data":"2f4a46d4b57df2d6571ee3d4e278638e04cc4cea822ade66d2b1300772c45f1c"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.683058 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.683090 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.683102 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.683111 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.683119 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.683915 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.683960 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts podName:e15a3f00-fce6-490e-9b6b-ca28d8334d25 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:35.183942835 +0000 UTC m=+1582.909939875 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts") pod "novacell0ed59-account-delete-v9rg9" (UID: "e15a3f00-fce6-490e-9b6b-ca28d8334d25") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.691172 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dlpwd"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.700069 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-ff9694557-gb857" event={"ID":"3201b201-3f68-4c2e-858c-56f0c8242c68","Type":"ContainerDied","Data":"e7f3886aba1af8ae1cb3a212d1e99200a5cd25ba247cc6a87638e031a97028a4"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.700112 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-ff9694557-gb857" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.700882 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "046f9d47-6b50-473f-838f-8375b6fe6389" (UID: "046f9d47-6b50-473f-838f-8375b6fe6389"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.702127 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-558b46f87f-4r8fh"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.702296 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-558b46f87f-4r8fh" podUID="26d1df4f-5673-4b66-ad39-6da15197ef72" containerName="keystone-api" containerID="cri-o://954c1f5c6657fcb41a451ef64463595a73b0405b651fcdb2833bcc61d54b9090" gracePeriod=30 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.702830 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lvrb5_da88ee86-9914-4396-bb33-d00d24b00c59/ovn-controller/0.log" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.703306 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapia811-account-delete-9vntl" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.703851 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/neutronadda-account-delete-wpdtm" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.705434 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/barbican6b9e-account-delete-7nrpl" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.705792 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.705902 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/glanceddac-account-delete-hlrff" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.706070 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lvrb5" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.706146 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lvrb5" event={"ID":"da88ee86-9914-4396-bb33-d00d24b00c59","Type":"ContainerDied","Data":"644de7450b3852ae89f8355aff95db944adc56f03822c3d390aa43bdc74200e9"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.715701 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.720673 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3201b201-3f68-4c2e-858c-56f0c8242c68" (UID: "3201b201-3f68-4c2e-858c-56f0c8242c68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.726203 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell0ed59-account-delete-v9rg9" podStartSLOduration=6.726179029 podStartE2EDuration="6.726179029s" podCreationTimestamp="2025-12-05 07:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:12:34.677615382 +0000 UTC m=+1582.403612422" watchObservedRunningTime="2025-12-05 07:12:34.726179029 +0000 UTC m=+1582.452176059" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.749243 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-t6xdp"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.750942 4863 scope.go:117] "RemoveContainer" containerID="de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.759781 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-t6xdp"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.768432 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-50e2-account-create-update-vbhxj"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.785042 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.785069 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046f9d47-6b50-473f-838f-8375b6fe6389-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.785077 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3201b201-3f68-4c2e-858c-56f0c8242c68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.785590 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-50e2-account-create-update-vbhxj"] Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.785614 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.785654 4863 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts podName:81284a21-5f4d-4135-b08e-94415569eb09 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:35.285641072 +0000 UTC m=+1583.011638112 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts") pod "novaapia811-account-delete-9vntl" (UID: "81284a21-5f4d-4135-b08e-94415569eb09") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.785867 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.785889 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts podName:d9d1ac32-bc45-41a0-b696-034ff92b13d4 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:35.285882158 +0000 UTC m=+1583.011879198 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts") pod "glanceddac-account-delete-hlrff" (UID: "d9d1ac32-bc45-41a0-b696-034ff92b13d4") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.786057 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.786106 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts podName:7b21df5e-065a-4c62-b271-704c86b97f58 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:35.286087943 +0000 UTC m=+1583.012085073 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts") pod "barbican6b9e-account-delete-7nrpl" (UID: "7b21df5e-065a-4c62-b271-704c86b97f58") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.786137 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.786158 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts podName:b5674946-023d-45c0-a0bf-373aa5d7ee65 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:35.286151574 +0000 UTC m=+1583.012148614 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts") pod "neutronadda-account-delete-wpdtm" (UID: "b5674946-023d-45c0-a0bf-373aa5d7ee65") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.788541 4863 scope.go:117] "RemoveContainer" containerID="ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.791301 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856\": container with ID starting with ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856 not found: ID does not exist" containerID="ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.791327 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856"} err="failed to get container status \"ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856\": rpc error: code = NotFound desc = could not find container \"ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856\": container with ID starting with ee3971fa0217aafcccbb746dea335441c951fbbd51458bb2cf8fc5d010c78856 not found: ID does not exist" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.791354 4863 scope.go:117] "RemoveContainer" containerID="de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.794098 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.794932 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531\": container with ID starting with de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531 not found: ID does not exist" containerID="de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.794978 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531"} err="failed to get container status \"de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531\": rpc error: code = NotFound desc = could not find container \"de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531\": container with ID starting with de0e777f5ef4c26ae23174af180348a57891e6412ebeaabc549f5da6d42d2531 not found: ID does not exist" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.795003 4863 scope.go:117] "RemoveContainer" containerID="8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.801801 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.812052 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lvrb5"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.820489 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lvrb5"] Dec 05 
07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.823853 4863 scope.go:117] "RemoveContainer" containerID="020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.851289 4863 scope.go:117] "RemoveContainer" containerID="8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.852156 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd\": container with ID starting with 8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd not found: ID does not exist" containerID="8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.852185 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd"} err="failed to get container status \"8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd\": rpc error: code = NotFound desc = could not find container \"8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd\": container with ID starting with 8e94eaa3c3dbd043767bc1f6b003f4b26de478818ab43e9006718f0ceca062bd not found: ID does not exist" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.852204 4863 scope.go:117] "RemoveContainer" containerID="020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:34.852452 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b\": container with ID starting with 020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b not found: ID does not exist" containerID="020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.852483 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b"} err="failed to get container status \"020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b\": rpc error: code = NotFound desc = could not find container \"020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b\": container with ID starting with 020b6eebfde7ff82c738e6382083e6d0984f00b92812b01f7b030569a20e8c9b not found: ID does not exist" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.852496 4863 scope.go:117] "RemoveContainer" containerID="ac2c5273ab207bce433edc60614777727533cb3043b62817be91b5f725b80074" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.873585 4863 scope.go:117] "RemoveContainer" containerID="f96538aa3751fc9603b9dde8527729c4d78aba237e3063756b0682a46b12cadd" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.897404 4863 scope.go:117] "RemoveContainer" containerID="8bb9f0e24beab12e2880dde6ea3b6faf8156efe7d58338a5839bbe812a11bdfe" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.901086 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerName="galera" containerID="cri-o://a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e" gracePeriod=30 Dec 05 07:12:36 crc 
kubenswrapper[4863]: I1205 07:12:34.956880 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-54d5bbb8cd-6z8nd"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:34.969483 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-54d5bbb8cd-6z8nd"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.041888 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-ff9694557-gb857"] Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.048507 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-ff9694557-gb857"] Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.191374 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.191467 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts podName:e15a3f00-fce6-490e-9b6b-ca28d8334d25 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:36.191444674 +0000 UTC m=+1583.917441714 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts") pod "novacell0ed59-account-delete-v9rg9" (UID: "e15a3f00-fce6-490e-9b6b-ca28d8334d25") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293008 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293089 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts podName:81284a21-5f4d-4135-b08e-94415569eb09 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:36.293073479 +0000 UTC m=+1584.019070529 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts") pod "novaapia811-account-delete-9vntl" (UID: "81284a21-5f4d-4135-b08e-94415569eb09") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293542 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293579 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts podName:7b21df5e-065a-4c62-b271-704c86b97f58 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:36.29356762 +0000 UTC m=+1584.019564670 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts") pod "barbican6b9e-account-delete-7nrpl" (UID: "7b21df5e-065a-4c62-b271-704c86b97f58") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293616 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293653 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts podName:b5674946-023d-45c0-a0bf-373aa5d7ee65 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:36.293638603 +0000 UTC m=+1584.019635663 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts") pod "neutronadda-account-delete-wpdtm" (UID: "b5674946-023d-45c0-a0bf-373aa5d7ee65") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293697 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.293730 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts podName:d9d1ac32-bc45-41a0-b696-034ff92b13d4 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:36.293719545 +0000 UTC m=+1584.019716595 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts") pod "glanceddac-account-delete-hlrff" (UID: "d9d1ac32-bc45-41a0-b696-034ff92b13d4") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.504576 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:35.504669 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data podName:c908ae8d-4ec2-4938-819c-0ba2ee26f209 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:43.50464673 +0000 UTC m=+1591.230643830 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data") pod "rabbitmq-cell1-server-0" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209") : configmap "rabbitmq-cell1-config-data" not found Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.724165 4863 generic.go:334] "Generic (PLEG): container finished" podID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerID="7550e24c5734f210c7db5088ea7f7de89cf17da52278a4790f7d7af94780b9ee" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.724494 4863 generic.go:334] "Generic (PLEG): container finished" podID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerID="317ef2eee14e624241ec945a7fdc4c1afe943f1965f78feb5dc586e82047769c" exitCode=2 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.724504 4863 generic.go:334] "Generic (PLEG): container finished" podID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerID="faa93203b4e74ae18536fcc22dae3d5e667b4f4cb6230251e992eb4cbb666258" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.724557 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerDied","Data":"7550e24c5734f210c7db5088ea7f7de89cf17da52278a4790f7d7af94780b9ee"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.724589 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerDied","Data":"317ef2eee14e624241ec945a7fdc4c1afe943f1965f78feb5dc586e82047769c"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.724602 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerDied","Data":"faa93203b4e74ae18536fcc22dae3d5e667b4f4cb6230251e992eb4cbb666258"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.727182 4863 generic.go:334] "Generic (PLEG): container finished" podID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerID="f36ebe2df557c66ab7206b7a13a758e1a097e24e728e0632ac580409fc3c1b99" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.727229 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronadda-account-delete-wpdtm" event={"ID":"b5674946-023d-45c0-a0bf-373aa5d7ee65","Type":"ContainerDied","Data":"f36ebe2df557c66ab7206b7a13a758e1a097e24e728e0632ac580409fc3c1b99"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.727827 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/neutronadda-account-delete-wpdtm" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.727855 4863 scope.go:117] "RemoveContainer" containerID="f36ebe2df557c66ab7206b7a13a758e1a097e24e728e0632ac580409fc3c1b99" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.744127 4863 generic.go:334] "Generic (PLEG): container finished" podID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerID="df94c2a716260a592e8e22b22317014b4322eb33fcee0a2adf82109c7651f931" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.744233 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0ed59-account-delete-v9rg9" event={"ID":"e15a3f00-fce6-490e-9b6b-ca28d8334d25","Type":"ContainerDied","Data":"df94c2a716260a592e8e22b22317014b4322eb33fcee0a2adf82109c7651f931"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.744722 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell0ed59-account-delete-v9rg9" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.744766 4863 scope.go:117] "RemoveContainer" containerID="df94c2a716260a592e8e22b22317014b4322eb33fcee0a2adf82109c7651f931" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.749286 4863 generic.go:334] "Generic (PLEG): container finished" podID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerID="f2662da293cfbe287c207bdc56fe94abd416d491d01023463a804b9131cd7e2e" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.749389 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceddac-account-delete-hlrff" event={"ID":"d9d1ac32-bc45-41a0-b696-034ff92b13d4","Type":"ContainerDied","Data":"f2662da293cfbe287c207bdc56fe94abd416d491d01023463a804b9131cd7e2e"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.750421 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/glanceddac-account-delete-hlrff" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.750454 4863 scope.go:117] "RemoveContainer" containerID="f2662da293cfbe287c207bdc56fe94abd416d491d01023463a804b9131cd7e2e" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.760580 4863 generic.go:334] "Generic (PLEG): container finished" podID="7b21df5e-065a-4c62-b271-704c86b97f58" containerID="05cf9d310f6f5930534f91e8433cfadec4cac29f103b9bff690bd665203c5d9f" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.760625 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6b9e-account-delete-7nrpl" event={"ID":"7b21df5e-065a-4c62-b271-704c86b97f58","Type":"ContainerDied","Data":"05cf9d310f6f5930534f91e8433cfadec4cac29f103b9bff690bd665203c5d9f"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.761332 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/barbican6b9e-account-delete-7nrpl" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.761381 4863 scope.go:117] "RemoveContainer" containerID="05cf9d310f6f5930534f91e8433cfadec4cac29f103b9bff690bd665203c5d9f" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.764854 4863 generic.go:334] "Generic (PLEG): container finished" podID="0765ebea-20ed-4ada-8031-3871a35e5f11" containerID="f6d81c1a4f657c6fae55a4c625c5b5be2de6ad49debaa852a7501310e581a784" exitCode=2 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.764911 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0765ebea-20ed-4ada-8031-3871a35e5f11","Type":"ContainerDied","Data":"f6d81c1a4f657c6fae55a4c625c5b5be2de6ad49debaa852a7501310e581a784"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.772454 4863 generic.go:334] "Generic (PLEG): container finished" podID="81284a21-5f4d-4135-b08e-94415569eb09" containerID="ab248ea2b3fb7716d112afdff0208e09af5de52fa6ae81ec6d34fd454fabcb0c" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.772549 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapia811-account-delete-9vntl" event={"ID":"81284a21-5f4d-4135-b08e-94415569eb09","Type":"ContainerDied","Data":"ab248ea2b3fb7716d112afdff0208e09af5de52fa6ae81ec6d34fd454fabcb0c"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.773133 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapia811-account-delete-9vntl" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.773163 4863 scope.go:117] "RemoveContainer" containerID="ab248ea2b3fb7716d112afdff0208e09af5de52fa6ae81ec6d34fd454fabcb0c" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.777861 4863 generic.go:334] "Generic (PLEG): container finished" podID="d847c56f-38ef-4aaf-a974-b347f5091038" containerID="75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:35.777970 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d847c56f-38ef-4aaf-a974-b347f5091038","Type":"ContainerDied","Data":"75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6"} Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.115872 4863 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.115939 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data podName:46586650-4568-4f5e-9854-30f6e0291b6b nodeName:}" failed. No retries permitted until 2025-12-05 07:12:44.115921716 +0000 UTC m=+1591.841918756 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data") pod "rabbitmq-server-0" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b") : configmap "rabbitmq-config-data" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.217231 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.217551 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts podName:e15a3f00-fce6-490e-9b6b-ca28d8334d25 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:38.217517829 +0000 UTC m=+1585.943514869 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts") pod "novacell0ed59-account-delete-v9rg9" (UID: "e15a3f00-fce6-490e-9b6b-ca28d8334d25") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.323747 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.323830 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts podName:81284a21-5f4d-4135-b08e-94415569eb09 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:38.323811527 +0000 UTC m=+1586.049808577 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts") pod "novaapia811-account-delete-9vntl" (UID: "81284a21-5f4d-4135-b08e-94415569eb09") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.323920 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.324000 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts podName:b5674946-023d-45c0-a0bf-373aa5d7ee65 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:38.323977231 +0000 UTC m=+1586.049974331 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts") pod "neutronadda-account-delete-wpdtm" (UID: "b5674946-023d-45c0-a0bf-373aa5d7ee65") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.324043 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.324068 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts podName:7b21df5e-065a-4c62-b271-704c86b97f58 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:38.324060313 +0000 UTC m=+1586.050057463 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts") pod "barbican6b9e-account-delete-7nrpl" (UID: "7b21df5e-065a-4c62-b271-704c86b97f58") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.324120 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.324136 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts podName:d9d1ac32-bc45-41a0-b696-034ff92b13d4 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:38.324131356 +0000 UTC m=+1586.050128396 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts") pod "glanceddac-account-delete-hlrff" (UID: "d9d1ac32-bc45-41a0-b696-034ff92b13d4") : configmap "openstack-scripts" not found Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.565079 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="79d93942-6ec7-4fea-9e05-a9c831ad3dd3" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.195:6080/vnc_lite.html\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.620436 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" path="/var/lib/kubelet/pods/046f9d47-6b50-473f-838f-8375b6fe6389/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.621681 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05b513fa-8a27-433e-85f1-7d252e0e6050" path="/var/lib/kubelet/pods/05b513fa-8a27-433e-85f1-7d252e0e6050/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.623206 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ef1f2f1-5905-4a67-9a5a-9d42f9dce122" path="/var/lib/kubelet/pods/0ef1f2f1-5905-4a67-9a5a-9d42f9dce122/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.624586 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" path="/var/lib/kubelet/pods/1a46c1ea-72b2-4dfd-a073-72f82617ce76/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.625444 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="298bab26-0793-4998-a6da-b4df8db6ee59" path="/var/lib/kubelet/pods/298bab26-0793-4998-a6da-b4df8db6ee59/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.626278 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" path="/var/lib/kubelet/pods/3201b201-3f68-4c2e-858c-56f0c8242c68/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.627534 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c5388c0-d809-4a83-a70d-ac33db4f5123" path="/var/lib/kubelet/pods/5c5388c0-d809-4a83-a70d-ac33db4f5123/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.628118 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" 
path="/var/lib/kubelet/pods/da88ee86-9914-4396-bb33-d00d24b00c59/volumes" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.832967 4863 generic.go:334] "Generic (PLEG): container finished" podID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerID="fa5daef059bea9be99399c23fdbffecb7ff93208b4415853aee9526be0c253b4" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.833389 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceddac-account-delete-hlrff" event={"ID":"d9d1ac32-bc45-41a0-b696-034ff92b13d4","Type":"ContainerDied","Data":"fa5daef059bea9be99399c23fdbffecb7ff93208b4415853aee9526be0c253b4"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.833429 4863 scope.go:117] "RemoveContainer" containerID="f2662da293cfbe287c207bdc56fe94abd416d491d01023463a804b9131cd7e2e" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.834138 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/glanceddac-account-delete-hlrff" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.834167 4863 scope.go:117] "RemoveContainer" containerID="fa5daef059bea9be99399c23fdbffecb7ff93208b4415853aee9526be0c253b4" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.834529 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=glanceddac-account-delete-hlrff_openstack(d9d1ac32-bc45-41a0-b696-034ff92b13d4)\"" pod="openstack/glanceddac-account-delete-hlrff" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.848532 4863 generic.go:334] "Generic (PLEG): container finished" podID="7b21df5e-065a-4c62-b271-704c86b97f58" containerID="7bb6b44e29c07ac7ad2848e995dffeb54c63a3e75e8fcb40dd8dabf5b26b9a14" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.848599 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6b9e-account-delete-7nrpl" event={"ID":"7b21df5e-065a-4c62-b271-704c86b97f58","Type":"ContainerDied","Data":"7bb6b44e29c07ac7ad2848e995dffeb54c63a3e75e8fcb40dd8dabf5b26b9a14"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.849202 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/barbican6b9e-account-delete-7nrpl" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.849245 4863 scope.go:117] "RemoveContainer" containerID="7bb6b44e29c07ac7ad2848e995dffeb54c63a3e75e8fcb40dd8dabf5b26b9a14" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.849454 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=barbican6b9e-account-delete-7nrpl_openstack(7b21df5e-065a-4c62-b271-704c86b97f58)\"" pod="openstack/barbican6b9e-account-delete-7nrpl" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.851625 4863 generic.go:334] "Generic (PLEG): container finished" podID="d4703140-cc56-4eb8-b06b-1033916a839f" containerID="d4bdd29e9c370627b90ad1916b3bd8db2227c44957a9ebe1de4ac4bdd4fdd598" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.851668 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d4703140-cc56-4eb8-b06b-1033916a839f","Type":"ContainerDied","Data":"d4bdd29e9c370627b90ad1916b3bd8db2227c44957a9ebe1de4ac4bdd4fdd598"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.853187 4863 generic.go:334] "Generic (PLEG): container finished" podID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerID="b604a52142e9d9f908783605b6df24046daf096d058499b941bcd279570ec906" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.853230 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronadda-account-delete-wpdtm" event={"ID":"b5674946-023d-45c0-a0bf-373aa5d7ee65","Type":"ContainerDied","Data":"b604a52142e9d9f908783605b6df24046daf096d058499b941bcd279570ec906"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.853570 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/neutronadda-account-delete-wpdtm" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.853595 4863 scope.go:117] "RemoveContainer" containerID="b604a52142e9d9f908783605b6df24046daf096d058499b941bcd279570ec906" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.853770 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=neutronadda-account-delete-wpdtm_openstack(b5674946-023d-45c0-a0bf-373aa5d7ee65)\"" pod="openstack/neutronadda-account-delete-wpdtm" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.863114 4863 generic.go:334] "Generic (PLEG): container finished" podID="81284a21-5f4d-4135-b08e-94415569eb09" containerID="e18bc44de06fc508590f1a10ba71eeaed8e5f29bae81bf79ed858ecc31de0d96" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.863188 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapia811-account-delete-9vntl" event={"ID":"81284a21-5f4d-4135-b08e-94415569eb09","Type":"ContainerDied","Data":"e18bc44de06fc508590f1a10ba71eeaed8e5f29bae81bf79ed858ecc31de0d96"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.864241 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novaapia811-account-delete-9vntl" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.864283 4863 scope.go:117] "RemoveContainer" containerID="e18bc44de06fc508590f1a10ba71eeaed8e5f29bae81bf79ed858ecc31de0d96" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.864608 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=novaapia811-account-delete-9vntl_openstack(81284a21-5f4d-4135-b08e-94415569eb09)\"" pod="openstack/novaapia811-account-delete-9vntl" podUID="81284a21-5f4d-4135-b08e-94415569eb09" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.867241 4863 generic.go:334] "Generic (PLEG): container finished" podID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerID="d71782c7a5ffb55ae64289ad77c98d1cb841124b3e5fa434d89d4e25da13e56a" exitCode=1 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.867286 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0ed59-account-delete-v9rg9" event={"ID":"e15a3f00-fce6-490e-9b6b-ca28d8334d25","Type":"ContainerDied","Data":"d71782c7a5ffb55ae64289ad77c98d1cb841124b3e5fa434d89d4e25da13e56a"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.867623 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell0ed59-account-delete-v9rg9" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.867649 4863 scope.go:117] "RemoveContainer" containerID="d71782c7a5ffb55ae64289ad77c98d1cb841124b3e5fa434d89d4e25da13e56a" Dec 05 07:12:36 crc kubenswrapper[4863]: E1205 07:12:36.867815 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=novacell0ed59-account-delete-v9rg9_openstack(e15a3f00-fce6-490e-9b6b-ca28d8334d25)\"" pod="openstack/novacell0ed59-account-delete-v9rg9" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.869677 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_51b09f18-7196-4b58-b4a9-29671ae5a243/ovn-northd/0.log" Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.869705 4863 generic.go:334] "Generic (PLEG): container finished" podID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" exitCode=139 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.869737 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"51b09f18-7196-4b58-b4a9-29671ae5a243","Type":"ContainerDied","Data":"8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35"} Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.870937 4863 generic.go:334] "Generic (PLEG): container finished" podID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerID="60b413aacbb400385f318f7b88f361cca235df215614238ffe8edc91b3a7e32f" exitCode=0 Dec 05 07:12:36 crc kubenswrapper[4863]: I1205 07:12:36.870959 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"c908ae8d-4ec2-4938-819c-0ba2ee26f209","Type":"ContainerDied","Data":"60b413aacbb400385f318f7b88f361cca235df215614238ffe8edc91b3a7e32f"} Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.205423 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.206339 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.206557 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.206923 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.206949 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.210333 4863 scope.go:117] "RemoveContainer" containerID="05cf9d310f6f5930534f91e8433cfadec4cac29f103b9bff690bd665203c5d9f" Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.214255 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.229430 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.230077 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.230135 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.318302 4863 scope.go:117] "RemoveContainer" containerID="f36ebe2df557c66ab7206b7a13a758e1a097e24e728e0632ac580409fc3c1b99" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.338114 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358230 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-combined-ca-bundle\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358267 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-public-tls-certs\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358287 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-logs\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358340 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-scripts\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358408 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-config-data\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358424 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l44xw\" (UniqueName: \"kubernetes.io/projected/0752f5c6-12cf-4208-b523-f970b63f1b4b-kube-api-access-l44xw\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358462 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-httpd-run\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.358551 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"0752f5c6-12cf-4208-b523-f970b63f1b4b\" (UID: \"0752f5c6-12cf-4208-b523-f970b63f1b4b\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.359741 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-logs" (OuterVolumeSpecName: "logs") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.364597 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.372206 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.375318 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-scripts" (OuterVolumeSpecName: "scripts") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.375700 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0752f5c6-12cf-4208-b523-f970b63f1b4b-kube-api-access-l44xw" (OuterVolumeSpecName: "kube-api-access-l44xw") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "kube-api-access-l44xw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.469050 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-public-tls-certs\") pod \"92c796ad-a73e-4924-a59f-05031fcbb9d0\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.469788 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-combined-ca-bundle\") pod \"92c796ad-a73e-4924-a59f-05031fcbb9d0\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.469906 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrqd5\" (UniqueName: \"kubernetes.io/projected/92c796ad-a73e-4924-a59f-05031fcbb9d0-kube-api-access-vrqd5\") pod \"92c796ad-a73e-4924-a59f-05031fcbb9d0\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470007 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data-custom\") pod \"92c796ad-a73e-4924-a59f-05031fcbb9d0\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470025 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c796ad-a73e-4924-a59f-05031fcbb9d0-logs\") pod \"92c796ad-a73e-4924-a59f-05031fcbb9d0\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470062 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data\") pod \"92c796ad-a73e-4924-a59f-05031fcbb9d0\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470081 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-internal-tls-certs\") pod \"92c796ad-a73e-4924-a59f-05031fcbb9d0\" (UID: \"92c796ad-a73e-4924-a59f-05031fcbb9d0\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470802 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470823 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470833 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470841 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l44xw\" (UniqueName: \"kubernetes.io/projected/0752f5c6-12cf-4208-b523-f970b63f1b4b-kube-api-access-l44xw\") on node \"crc\" 
DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.470851 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0752f5c6-12cf-4208-b523-f970b63f1b4b-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.471975 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92c796ad-a73e-4924-a59f-05031fcbb9d0-logs" (OuterVolumeSpecName: "logs") pod "92c796ad-a73e-4924-a59f-05031fcbb9d0" (UID: "92c796ad-a73e-4924-a59f-05031fcbb9d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.477836 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92c796ad-a73e-4924-a59f-05031fcbb9d0-kube-api-access-vrqd5" (OuterVolumeSpecName: "kube-api-access-vrqd5") pod "92c796ad-a73e-4924-a59f-05031fcbb9d0" (UID: "92c796ad-a73e-4924-a59f-05031fcbb9d0"). InnerVolumeSpecName "kube-api-access-vrqd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.493184 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "92c796ad-a73e-4924-a59f-05031fcbb9d0" (UID: "92c796ad-a73e-4924-a59f-05031fcbb9d0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.548890 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.572752 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.572785 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c796ad-a73e-4924-a59f-05031fcbb9d0-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.572795 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrqd5\" (UniqueName: \"kubernetes.io/projected/92c796ad-a73e-4924-a59f-05031fcbb9d0-kube-api-access-vrqd5\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.572805 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.605600 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92c796ad-a73e-4924-a59f-05031fcbb9d0" (UID: "92c796ad-a73e-4924-a59f-05031fcbb9d0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.613805 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.623386 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-config-data" (OuterVolumeSpecName: "config-data") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.626660 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "92c796ad-a73e-4924-a59f-05031fcbb9d0" (UID: "92c796ad-a73e-4924-a59f-05031fcbb9d0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.629620 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0752f5c6-12cf-4208-b523-f970b63f1b4b" (UID: "0752f5c6-12cf-4208-b523-f970b63f1b4b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.632521 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "92c796ad-a73e-4924-a59f-05031fcbb9d0" (UID: "92c796ad-a73e-4924-a59f-05031fcbb9d0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.632742 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.640253 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35 is running failed: container process not found" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.640707 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35 is running failed: container process not found" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.641098 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35 is running failed: container process not found" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 07:12:37 crc kubenswrapper[4863]: E1205 07:12:37.641128 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="ovn-northd" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.642020 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.651997 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.663335 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data" (OuterVolumeSpecName: "config-data") pod "92c796ad-a73e-4924-a59f-05031fcbb9d0" (UID: "92c796ad-a73e-4924-a59f-05031fcbb9d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.669887 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_51b09f18-7196-4b58-b4a9-29671ae5a243/ovn-northd/0.log" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.669950 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.675450 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.675498 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.675512 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.675524 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.675536 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.675546 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c796ad-a73e-4924-a59f-05031fcbb9d0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.675554 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0752f5c6-12cf-4208-b523-f970b63f1b4b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.694672 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.697043 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.777964 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-combined-ca-bundle\") pod \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778027 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-northd-tls-certs\") pod \"51b09f18-7196-4b58-b4a9-29671ae5a243\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778054 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-config-data\") pod \"3409451f-c36d-4577-8720-89f4b6dd5ec5\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778083 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3409451f-c36d-4577-8720-89f4b6dd5ec5-logs\") pod \"3409451f-c36d-4577-8720-89f4b6dd5ec5\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778149 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-config\") pod \"51b09f18-7196-4b58-b4a9-29671ae5a243\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778191 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5t49s\" (UniqueName: \"kubernetes.io/projected/3409451f-c36d-4577-8720-89f4b6dd5ec5-kube-api-access-5t49s\") pod \"3409451f-c36d-4577-8720-89f4b6dd5ec5\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778216 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-metrics-certs-tls-certs\") pod \"51b09f18-7196-4b58-b4a9-29671ae5a243\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778241 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-combined-ca-bundle\") pod \"51b09f18-7196-4b58-b4a9-29671ae5a243\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778301 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-rundir\") pod \"51b09f18-7196-4b58-b4a9-29671ae5a243\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778336 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k556n\" (UniqueName: \"kubernetes.io/projected/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-kube-api-access-k556n\") pod 
\"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778359 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb6dc\" (UniqueName: \"kubernetes.io/projected/51b09f18-7196-4b58-b4a9-29671ae5a243-kube-api-access-rb6dc\") pod \"51b09f18-7196-4b58-b4a9-29671ae5a243\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778383 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2w4t\" (UniqueName: \"kubernetes.io/projected/afae6292-c1df-4dd3-abec-d1f493c03857-kube-api-access-p2w4t\") pod \"afae6292-c1df-4dd3-abec-d1f493c03857\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778414 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-nova-metadata-tls-certs\") pod \"3409451f-c36d-4577-8720-89f4b6dd5ec5\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778483 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-combined-ca-bundle\") pod \"3409451f-c36d-4577-8720-89f4b6dd5ec5\" (UID: \"3409451f-c36d-4577-8720-89f4b6dd5ec5\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778509 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afae6292-c1df-4dd3-abec-d1f493c03857-operator-scripts\") pod \"afae6292-c1df-4dd3-abec-d1f493c03857\" (UID: \"afae6292-c1df-4dd3-abec-d1f493c03857\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778555 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-scripts\") pod \"51b09f18-7196-4b58-b4a9-29671ae5a243\" (UID: \"51b09f18-7196-4b58-b4a9-29671ae5a243\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.778666 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-config-data\") pod \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\" (UID: \"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.782221 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3409451f-c36d-4577-8720-89f4b6dd5ec5-logs" (OuterVolumeSpecName: "logs") pod "3409451f-c36d-4577-8720-89f4b6dd5ec5" (UID: "3409451f-c36d-4577-8720-89f4b6dd5ec5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.791386 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afae6292-c1df-4dd3-abec-d1f493c03857-kube-api-access-p2w4t" (OuterVolumeSpecName: "kube-api-access-p2w4t") pod "afae6292-c1df-4dd3-abec-d1f493c03857" (UID: "afae6292-c1df-4dd3-abec-d1f493c03857"). InnerVolumeSpecName "kube-api-access-p2w4t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.796940 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "51b09f18-7196-4b58-b4a9-29671ae5a243" (UID: "51b09f18-7196-4b58-b4a9-29671ae5a243"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.797245 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-kube-api-access-k556n" (OuterVolumeSpecName: "kube-api-access-k556n") pod "90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" (UID: "90f6dc4d-a9ff-4bf7-a44f-0074c56130d2"). InnerVolumeSpecName "kube-api-access-k556n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.797334 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afae6292-c1df-4dd3-abec-d1f493c03857-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "afae6292-c1df-4dd3-abec-d1f493c03857" (UID: "afae6292-c1df-4dd3-abec-d1f493c03857"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.800786 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-scripts" (OuterVolumeSpecName: "scripts") pod "51b09f18-7196-4b58-b4a9-29671ae5a243" (UID: "51b09f18-7196-4b58-b4a9-29671ae5a243"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.803702 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-config" (OuterVolumeSpecName: "config") pod "51b09f18-7196-4b58-b4a9-29671ae5a243" (UID: "51b09f18-7196-4b58-b4a9-29671ae5a243"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.808342 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51b09f18-7196-4b58-b4a9-29671ae5a243-kube-api-access-rb6dc" (OuterVolumeSpecName: "kube-api-access-rb6dc") pod "51b09f18-7196-4b58-b4a9-29671ae5a243" (UID: "51b09f18-7196-4b58-b4a9-29671ae5a243"). InnerVolumeSpecName "kube-api-access-rb6dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.823157 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3409451f-c36d-4577-8720-89f4b6dd5ec5-kube-api-access-5t49s" (OuterVolumeSpecName: "kube-api-access-5t49s") pod "3409451f-c36d-4577-8720-89f4b6dd5ec5" (UID: "3409451f-c36d-4577-8720-89f4b6dd5ec5"). InnerVolumeSpecName "kube-api-access-5t49s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.829119 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-6zdth"] Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.833354 4863 scope.go:117] "RemoveContainer" containerID="ab248ea2b3fb7716d112afdff0208e09af5de52fa6ae81ec6d34fd454fabcb0c" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.836637 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.849945 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-6zdth"] Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.852955 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3409451f-c36d-4577-8720-89f4b6dd5ec5" (UID: "3409451f-c36d-4577-8720-89f4b6dd5ec5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.857783 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-config-data" (OuterVolumeSpecName: "config-data") pod "3409451f-c36d-4577-8720-89f4b6dd5ec5" (UID: "3409451f-c36d-4577-8720-89f4b6dd5ec5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.867006 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderd676-account-delete-7ks4z"] Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.872976 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51b09f18-7196-4b58-b4a9-29671ae5a243" (UID: "51b09f18-7196-4b58-b4a9-29671ae5a243"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.877636 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-config-data" (OuterVolumeSpecName: "config-data") pod "90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" (UID: "90f6dc4d-a9ff-4bf7-a44f-0074c56130d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.877655 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" (UID: "90f6dc4d-a9ff-4bf7-a44f-0074c56130d2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881083 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-certs\") pod \"0765ebea-20ed-4ada-8031-3871a35e5f11\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881129 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-internal-tls-certs\") pod \"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881151 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-config-data\") pod \"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881167 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-scripts\") pod \"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881205 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-config\") pod \"0765ebea-20ed-4ada-8031-3871a35e5f11\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881303 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-combined-ca-bundle\") pod \"0765ebea-20ed-4ada-8031-3871a35e5f11\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881329 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-logs\") pod \"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881361 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbggp\" (UniqueName: \"kubernetes.io/projected/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-api-access-qbggp\") pod \"0765ebea-20ed-4ada-8031-3871a35e5f11\" (UID: \"0765ebea-20ed-4ada-8031-3871a35e5f11\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881380 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsvsq\" (UniqueName: \"kubernetes.io/projected/5c1e2892-03e9-4f09-84ce-0c91842108cc-kube-api-access-fsvsq\") pod \"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881414 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-combined-ca-bundle\") pod 
\"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881430 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-httpd-run\") pod \"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881453 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"5c1e2892-03e9-4f09-84ce-0c91842108cc\" (UID: \"5c1e2892-03e9-4f09-84ce-0c91842108cc\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881816 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881826 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3409451f-c36d-4577-8720-89f4b6dd5ec5-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881835 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881843 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5t49s\" (UniqueName: \"kubernetes.io/projected/3409451f-c36d-4577-8720-89f4b6dd5ec5-kube-api-access-5t49s\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881853 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881861 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881871 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k556n\" (UniqueName: \"kubernetes.io/projected/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-kube-api-access-k556n\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881878 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb6dc\" (UniqueName: \"kubernetes.io/projected/51b09f18-7196-4b58-b4a9-29671ae5a243-kube-api-access-rb6dc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881886 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2w4t\" (UniqueName: \"kubernetes.io/projected/afae6292-c1df-4dd3-abec-d1f493c03857-kube-api-access-p2w4t\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881894 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881904 4863 reconciler_common.go:293] "Volume 
detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afae6292-c1df-4dd3-abec-d1f493c03857-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881913 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51b09f18-7196-4b58-b4a9-29671ae5a243-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881921 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.881929 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.884550 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-scripts" (OuterVolumeSpecName: "scripts") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.885269 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.885630 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-d676-account-create-update-nvl4v"] Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.889378 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-d676-account-create-update-nvl4v"] Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.890681 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-api-access-qbggp" (OuterVolumeSpecName: "kube-api-access-qbggp") pod "0765ebea-20ed-4ada-8031-3871a35e5f11" (UID: "0765ebea-20ed-4ada-8031-3871a35e5f11"). InnerVolumeSpecName "kube-api-access-qbggp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.891071 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-logs" (OuterVolumeSpecName: "logs") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.891824 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.895721 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c1e2892-03e9-4f09-84ce-0c91842108cc-kube-api-access-fsvsq" (OuterVolumeSpecName: "kube-api-access-fsvsq") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "kube-api-access-fsvsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.896713 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.897731 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d4703140-cc56-4eb8-b06b-1033916a839f","Type":"ContainerDied","Data":"a79206c19748560e7c43fd4a9c9d5d776206143ee3e536a4347423d3b0f3d210"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.897769 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a79206c19748560e7c43fd4a9c9d5d776206143ee3e536a4347423d3b0f3d210" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.898173 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3409451f-c36d-4577-8720-89f4b6dd5ec5" (UID: "3409451f-c36d-4577-8720-89f4b6dd5ec5"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.899993 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c1e2892-03e9-4f09-84ce-0c91842108cc","Type":"ContainerDied","Data":"da5d5b4769a7f3a761cd294e9b7bfa4dad907aaa1a1e4ba0ae60a08d55e83ee5"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.900105 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.907546 4863 generic.go:334] "Generic (PLEG): container finished" podID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerID="de7ae6772610824286f4a36b1feda8b4ab485b788e3f580e7258992e82f32077" exitCode=0 Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.907617 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-594bb7dbb9-862q2" event={"ID":"97b9e3bc-115e-4613-9e5e-4cf44651585e","Type":"ContainerDied","Data":"de7ae6772610824286f4a36b1feda8b4ab485b788e3f580e7258992e82f32077"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.908750 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.930929 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderd676-account-delete-7ks4z" event={"ID":"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1","Type":"ContainerDied","Data":"56af4b56fba7f0f76551dcdec90ee833fa473dbff47da617d0e51b0416687089"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.930979 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56af4b56fba7f0f76551dcdec90ee833fa473dbff47da617d0e51b0416687089" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.931165 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderd676-account-delete-7ks4z" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.945658 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "51b09f18-7196-4b58-b4a9-29671ae5a243" (UID: "51b09f18-7196-4b58-b4a9-29671ae5a243"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.945970 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.946226 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "51b09f18-7196-4b58-b4a9-29671ae5a243" (UID: "51b09f18-7196-4b58-b4a9-29671ae5a243"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.946411 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0752f5c6-12cf-4208-b523-f970b63f1b4b","Type":"ContainerDied","Data":"72ec4d046f3f5ad9eff16adc698fc45b4acd74fdb27c848fc1acb11517c892e1"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.946522 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.961938 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-776db75b76-jmjll" event={"ID":"92c796ad-a73e-4924-a59f-05031fcbb9d0","Type":"ContainerDied","Data":"3f24d24ddecde0c03610f95481ed132c7db37024414b9adb37509ec427cedc45"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.961963 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-776db75b76-jmjll" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.964352 4863 generic.go:334] "Generic (PLEG): container finished" podID="46586650-4568-4f5e-9854-30f6e0291b6b" containerID="7d926cefd2b8f2782d82e3be6c2ab3724e1e4e1d45592a80014e9eb9c83211c5" exitCode=0 Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.964371 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46586650-4568-4f5e-9854-30f6e0291b6b","Type":"ContainerDied","Data":"7d926cefd2b8f2782d82e3be6c2ab3724e1e4e1d45592a80014e9eb9c83211c5"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.967147 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46586650-4568-4f5e-9854-30f6e0291b6b","Type":"ContainerDied","Data":"12c440764f9c634198aff9aab51c829761d52ec3201b3e8fdf93d5674796ab5f"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.967184 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12c440764f9c634198aff9aab51c829761d52ec3201b3e8fdf93d5674796ab5f" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.967702 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.968323 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90f6dc4d-a9ff-4bf7-a44f-0074c56130d2","Type":"ContainerDied","Data":"271765f99b986d264d9c40cd718d08fc4a22ef62ec3a210144c4a50ea66e096b"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.968387 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.977817 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "0765ebea-20ed-4ada-8031-3871a35e5f11" (UID: "0765ebea-20ed-4ada-8031-3871a35e5f11"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.981207 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_51b09f18-7196-4b58-b4a9-29671ae5a243/ovn-northd/0.log" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.981320 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"51b09f18-7196-4b58-b4a9-29671ae5a243","Type":"ContainerDied","Data":"f4ac7c4a0753d5fd636beac6d2322223616c09dccb5b89e6b1bce687a4529b62"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.981330 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.983187 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-config-data\") pod \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.983347 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-logs\") pod \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.983373 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-internal-tls-certs\") pod \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.983422 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-combined-ca-bundle\") pod \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.983562 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-public-tls-certs\") pod \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.983585 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c84h\" (UniqueName: \"kubernetes.io/projected/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-kube-api-access-2c84h\") pod \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\" (UID: \"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c\") " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984040 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984076 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984089 4863 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3409451f-c36d-4577-8720-89f4b6dd5ec5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984103 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984115 4863 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: 
I1205 07:12:37.984128 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984139 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c1e2892-03e9-4f09-84ce-0c91842108cc-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984151 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbggp\" (UniqueName: \"kubernetes.io/projected/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-api-access-qbggp\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984163 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsvsq\" (UniqueName: \"kubernetes.io/projected/5c1e2892-03e9-4f09-84ce-0c91842108cc-kube-api-access-fsvsq\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.984173 4863 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/51b09f18-7196-4b58-b4a9-29671ae5a243-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.987063 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.988815 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3409451f-c36d-4577-8720-89f4b6dd5ec5","Type":"ContainerDied","Data":"3ca4cffe58a91e72355ada98e05e32f145f842cea518e3bbea4d306abb6d490a"} Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.989487 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-logs" (OuterVolumeSpecName: "logs") pod "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" (UID: "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.989587 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 07:12:37 crc kubenswrapper[4863]: I1205 07:12:37.995559 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.002979 4863 generic.go:334] "Generic (PLEG): container finished" podID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerID="f9cbbd75cf4e441651ca07035330ebfbbf0163c249a21a5f6b8d9986b0edde6a" exitCode=0 Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.003055 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" event={"ID":"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6","Type":"ContainerDied","Data":"f9cbbd75cf4e441651ca07035330ebfbbf0163c249a21a5f6b8d9986b0edde6a"} Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.029982 4863 scope.go:117] "RemoveContainer" containerID="df94c2a716260a592e8e22b22317014b4322eb33fcee0a2adf82109c7651f931" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.034131 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement1771-account-delete-h4fq8" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.034410 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement1771-account-delete-h4fq8" event={"ID":"afae6292-c1df-4dd3-abec-d1f493c03857","Type":"ContainerDied","Data":"b845b978cf5b213dcc4db68e78e700da10c02ad74a2cd85998ebd530a99ed44c"} Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.034446 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b845b978cf5b213dcc4db68e78e700da10c02ad74a2cd85998ebd530a99ed44c" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.052615 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-kube-api-access-2c84h" (OuterVolumeSpecName: "kube-api-access-2c84h") pod "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" (UID: "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c"). InnerVolumeSpecName "kube-api-access-2c84h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.068371 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.074540 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.078193 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0765ebea-20ed-4ada-8031-3871a35e5f11" (UID: "0765ebea-20ed-4ada-8031-3871a35e5f11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.078193 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.081023 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c","Type":"ContainerDied","Data":"f987a136754028cb01eec218aa516f67724f78f8baf0aff8113053bf15c6ef65"} Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.081651 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.083140 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-xtjxf"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085180 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-plugins\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085229 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-876mg\" (UniqueName: \"kubernetes.io/projected/d847c56f-38ef-4aaf-a974-b347f5091038-kube-api-access-876mg\") pod \"d847c56f-38ef-4aaf-a974-b347f5091038\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085248 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-confd\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085265 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-plugins\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085283 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-confd\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085304 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085320 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-server-conf\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085338 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-tls\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085359 
4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-tls\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085386 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-plugins-conf\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085419 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-operator-scripts\") pod \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085450 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-server-conf\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085516 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-memcached-tls-certs\") pod \"d4703140-cc56-4eb8-b06b-1033916a839f\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085545 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46586650-4568-4f5e-9854-30f6e0291b6b-pod-info\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085575 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c908ae8d-4ec2-4938-819c-0ba2ee26f209-pod-info\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085598 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-erlang-cookie\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085620 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnlrt\" (UniqueName: \"kubernetes.io/projected/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-kube-api-access-vnlrt\") pod \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\" (UID: \"31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085676 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-config-data\") pod \"d4703140-cc56-4eb8-b06b-1033916a839f\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085704 4863 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085748 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbqfq\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-kube-api-access-pbqfq\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085802 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-plugins-conf\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085836 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085871 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47zl5\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-kube-api-access-47zl5\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085895 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085915 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrnpc\" (UniqueName: \"kubernetes.io/projected/d4703140-cc56-4eb8-b06b-1033916a839f-kube-api-access-nrnpc\") pod \"d4703140-cc56-4eb8-b06b-1033916a839f\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.085995 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-config-data\") pod \"d847c56f-38ef-4aaf-a974-b347f5091038\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.086030 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-combined-ca-bundle\") pod \"d847c56f-38ef-4aaf-a974-b347f5091038\" (UID: \"d847c56f-38ef-4aaf-a974-b347f5091038\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.086056 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c908ae8d-4ec2-4938-819c-0ba2ee26f209-erlang-cookie-secret\") pod \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\" (UID: \"c908ae8d-4ec2-4938-819c-0ba2ee26f209\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.086110 4863 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-kolla-config\") pod \"d4703140-cc56-4eb8-b06b-1033916a839f\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.088173 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46586650-4568-4f5e-9854-30f6e0291b6b-erlang-cookie-secret\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.088212 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-combined-ca-bundle\") pod \"d4703140-cc56-4eb8-b06b-1033916a839f\" (UID: \"d4703140-cc56-4eb8-b06b-1033916a839f\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.088239 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-erlang-cookie\") pod \"46586650-4568-4f5e-9854-30f6e0291b6b\" (UID: \"46586650-4568-4f5e-9854-30f6e0291b6b\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.089574 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.089604 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.089618 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.089633 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c84h\" (UniqueName: \"kubernetes.io/projected/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-kube-api-access-2c84h\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.089649 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.089661 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.094591 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0765ebea-20ed-4ada-8031-3871a35e5f11","Type":"ContainerDied","Data":"677ea7e1d8171770713d9ce886e2267dfafbdf296ee2b1383fbe1ffa40227dd7"} Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.086743 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod 
"46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.086783 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-config-data" (OuterVolumeSpecName: "config-data") pod "d4703140-cc56-4eb8-b06b-1033916a839f" (UID: "d4703140-cc56-4eb8-b06b-1033916a839f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.087144 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.087724 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.088240 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1" (UID: "31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.088709 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.090099 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.094913 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "d4703140-cc56-4eb8-b06b-1033916a839f" (UID: "d4703140-cc56-4eb8-b06b-1033916a839f"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.097520 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.100761 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.101077 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.106819 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.106988 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.107002 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c908ae8d-4ec2-4938-819c-0ba2ee26f209","Type":"ContainerDied","Data":"fdd0eb043ffbf6bf9452631cf74ead66e8377eb3eafd10db25b76df07f32d7be"} Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.107095 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.112669 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-config-data" (OuterVolumeSpecName: "config-data") pod "5c1e2892-03e9-4f09-84ce-0c91842108cc" (UID: "5c1e2892-03e9-4f09-84ce-0c91842108cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.112765 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d847c56f-38ef-4aaf-a974-b347f5091038","Type":"ContainerDied","Data":"fcc66e85462c1fe5e8b7294c7f38eb5e87573c0489caa265f6e259472be38d04"} Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.112886 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c908ae8d-4ec2-4938-819c-0ba2ee26f209-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.112894 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.113977 4863 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novacell0ed59-account-delete-v9rg9" secret="" err="secret \"galera-openstack-dockercfg-ws7fv\" not found" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.114073 4863 scope.go:117] "RemoveContainer" containerID="d71782c7a5ffb55ae64289ad77c98d1cb841124b3e5fa434d89d4e25da13e56a" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.114711 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-kube-api-access-pbqfq" (OuterVolumeSpecName: "kube-api-access-pbqfq") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "kube-api-access-pbqfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.114941 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=novacell0ed59-account-delete-v9rg9_openstack(e15a3f00-fce6-490e-9b6b-ca28d8334d25)\"" pod="openstack/novacell0ed59-account-delete-v9rg9" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.114986 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-xtjxf"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.115883 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/46586650-4568-4f5e-9854-30f6e0291b6b-pod-info" (OuterVolumeSpecName: "pod-info") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.116711 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46586650-4568-4f5e-9854-30f6e0291b6b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.116928 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-kube-api-access-vnlrt" (OuterVolumeSpecName: "kube-api-access-vnlrt") pod "31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1" (UID: "31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1"). InnerVolumeSpecName "kube-api-access-vnlrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.117385 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c908ae8d-4ec2-4938-819c-0ba2ee26f209-pod-info" (OuterVolumeSpecName: "pod-info") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.119759 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d847c56f-38ef-4aaf-a974-b347f5091038-kube-api-access-876mg" (OuterVolumeSpecName: "kube-api-access-876mg") pod "d847c56f-38ef-4aaf-a974-b347f5091038" (UID: "d847c56f-38ef-4aaf-a974-b347f5091038"). 
InnerVolumeSpecName "kube-api-access-876mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.126582 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.129525 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-config-data" (OuterVolumeSpecName: "config-data") pod "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" (UID: "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.134349 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.134485 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4703140-cc56-4eb8-b06b-1033916a839f-kube-api-access-nrnpc" (OuterVolumeSpecName: "kube-api-access-nrnpc") pod "d4703140-cc56-4eb8-b06b-1033916a839f" (UID: "d4703140-cc56-4eb8-b06b-1033916a839f"). InnerVolumeSpecName "kube-api-access-nrnpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.145416 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-kube-api-access-47zl5" (OuterVolumeSpecName: "kube-api-access-47zl5") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "kube-api-access-47zl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.156180 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-adda-account-create-update-hj2tr"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.175387 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "0765ebea-20ed-4ada-8031-3871a35e5f11" (UID: "0765ebea-20ed-4ada-8031-3871a35e5f11"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191654 4863 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46586650-4568-4f5e-9854-30f6e0291b6b-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191704 4863 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c908ae8d-4ec2-4938-819c-0ba2ee26f209-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191717 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191730 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnlrt\" (UniqueName: \"kubernetes.io/projected/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-kube-api-access-vnlrt\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191742 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191766 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191777 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbqfq\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-kube-api-access-pbqfq\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191789 4863 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191801 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47zl5\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-kube-api-access-47zl5\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191812 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrnpc\" (UniqueName: \"kubernetes.io/projected/d4703140-cc56-4eb8-b06b-1033916a839f-kube-api-access-nrnpc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191825 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191836 4863 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c908ae8d-4ec2-4938-819c-0ba2ee26f209-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191847 4863 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d4703140-cc56-4eb8-b06b-1033916a839f-kolla-config\") on node \"crc\" 
DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191858 4863 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0765ebea-20ed-4ada-8031-3871a35e5f11-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191882 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c1e2892-03e9-4f09-84ce-0c91842108cc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191896 4863 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46586650-4568-4f5e-9854-30f6e0291b6b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191908 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191919 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191931 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-876mg\" (UniqueName: \"kubernetes.io/projected/d847c56f-38ef-4aaf-a974-b347f5091038-kube-api-access-876mg\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191951 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191973 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191984 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.191995 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.192004 4863 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.192014 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.194648 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-adda-account-create-update-hj2tr"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.199314 4863 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data" (OuterVolumeSpecName: "config-data") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.201572 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4703140-cc56-4eb8-b06b-1033916a839f" (UID: "d4703140-cc56-4eb8-b06b-1033916a839f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.217561 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronadda-account-delete-wpdtm"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.238527 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.250943 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.270101 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.270662 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.283222 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d847c56f-38ef-4aaf-a974-b347f5091038" (UID: "d847c56f-38ef-4aaf-a974-b347f5091038"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.283304 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.287609 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" (UID: "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.291957 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data" (OuterVolumeSpecName: "config-data") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.293346 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-config-data" (OuterVolumeSpecName: "config-data") pod "d847c56f-38ef-4aaf-a974-b347f5091038" (UID: "d847c56f-38ef-4aaf-a974-b347f5091038"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.294440 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.294466 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.294498 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.294510 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.294521 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.294533 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d847c56f-38ef-4aaf-a974-b347f5091038-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.294544 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.294599 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.294660 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts podName:e15a3f00-fce6-490e-9b6b-ca28d8334d25 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:42.294641547 +0000 UTC m=+1590.020638587 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts") pod "novacell0ed59-account-delete-v9rg9" (UID: "e15a3f00-fce6-490e-9b6b-ca28d8334d25") : configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.329746 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" (UID: "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.356298 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-server-conf" (OuterVolumeSpecName: "server-conf") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). 
InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.356507 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" (UID: "8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.356602 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.367797 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-server-conf" (OuterVolumeSpecName: "server-conf") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.386519 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397189 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397350 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397457 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts podName:d9d1ac32-bc45-41a0-b696-034ff92b13d4 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:42.397257506 +0000 UTC m=+1590.123254546 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts") pod "glanceddac-account-delete-hlrff" (UID: "d9d1ac32-bc45-41a0-b696-034ff92b13d4") : configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397501 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts podName:81284a21-5f4d-4135-b08e-94415569eb09 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:42.397480451 +0000 UTC m=+1590.123477491 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts") pod "novaapia811-account-delete-9vntl" (UID: "81284a21-5f4d-4135-b08e-94415569eb09") : configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397524 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397570 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts podName:b5674946-023d-45c0-a0bf-373aa5d7ee65 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:42.397557383 +0000 UTC m=+1590.123554433 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts") pod "neutronadda-account-delete-wpdtm" (UID: "b5674946-023d-45c0-a0bf-373aa5d7ee65") : configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397678 4863 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.397715 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts podName:7b21df5e-065a-4c62-b271-704c86b97f58 nodeName:}" failed. No retries permitted until 2025-12-05 07:12:42.397701696 +0000 UTC m=+1590.123698736 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts") pod "barbican6b9e-account-delete-7nrpl" (UID: "7b21df5e-065a-4c62-b271-704c86b97f58") : configmap "openstack-scripts" not found Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.398576 4863 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c908ae8d-4ec2-4938-819c-0ba2ee26f209-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.398609 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.398636 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.398655 4863 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46586650-4568-4f5e-9854-30f6e0291b6b-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.424804 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.437858 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-spckz"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.445593 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-776db75b76-jmjll"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.451001 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "d4703140-cc56-4eb8-b06b-1033916a839f" (UID: "d4703140-cc56-4eb8-b06b-1033916a839f"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.452163 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-spckz"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.459556 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-776db75b76-jmjll"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.471906 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement1771-account-delete-h4fq8"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.492420 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-1771-account-create-update-wpz6n"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.496873 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "46586650-4568-4f5e-9854-30f6e0291b6b" (UID: "46586650-4568-4f5e-9854-30f6e0291b6b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.502037 4863 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4703140-cc56-4eb8-b06b-1033916a839f-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.502073 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.502087 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46586650-4568-4f5e-9854-30f6e0291b6b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.507793 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c908ae8d-4ec2-4938-819c-0ba2ee26f209" (UID: "c908ae8d-4ec2-4938-819c-0ba2ee26f209"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.508480 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement1771-account-delete-h4fq8"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.515146 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-1771-account-create-update-wpz6n"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.529907 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-gr997"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.537386 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-gr997"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.545196 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican6b9e-account-delete-7nrpl"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.553726 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6b9e-account-create-update-cqtwp"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.560159 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6b9e-account-create-update-cqtwp"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.574001 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-q6zh7"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.579506 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-q6zh7"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.596537 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glanceddac-account-delete-hlrff"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.605047 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c908ae8d-4ec2-4938-819c-0ba2ee26f209-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.619515 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" path="/var/lib/kubelet/pods/0752f5c6-12cf-4208-b523-f970b63f1b4b/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.620505 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" path="/var/lib/kubelet/pods/3409451f-c36d-4577-8720-89f4b6dd5ec5/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.621175 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3502bece-8818-4d62-8d27-5ec406844377" path="/var/lib/kubelet/pods/3502bece-8818-4d62-8d27-5ec406844377/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.622362 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b958e2-c440-43c3-b09c-615cf1ebf331" path="/var/lib/kubelet/pods/35b958e2-c440-43c3-b09c-615cf1ebf331/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.623056 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" path="/var/lib/kubelet/pods/51b09f18-7196-4b58-b4a9-29671ae5a243/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.623776 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="695c91cf-5f69-4818-a1c1-e9739b247eca" path="/var/lib/kubelet/pods/695c91cf-5f69-4818-a1c1-e9739b247eca/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 
07:12:38.625109 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bd6c593-c2d5-4e25-98f6-5fa0a96af771" path="/var/lib/kubelet/pods/7bd6c593-c2d5-4e25-98f6-5fa0a96af771/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.625756 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bd48690-e87b-4b7d-9ba3-46e5d33c1e72" path="/var/lib/kubelet/pods/8bd48690-e87b-4b7d-9ba3-46e5d33c1e72/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.626373 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" path="/var/lib/kubelet/pods/90f6dc4d-a9ff-4bf7-a44f-0074c56130d2/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.627627 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" path="/var/lib/kubelet/pods/92c796ad-a73e-4924-a59f-05031fcbb9d0/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.628324 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4057f14-25fe-40a6-a802-05472d2aad87" path="/var/lib/kubelet/pods/a4057f14-25fe-40a6-a802-05472d2aad87/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.628917 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afae6292-c1df-4dd3-abec-d1f493c03857" path="/var/lib/kubelet/pods/afae6292-c1df-4dd3-abec-d1f493c03857/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.631152 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6a8da69-afe1-4825-b8ad-ffb693553773" path="/var/lib/kubelet/pods/b6a8da69-afe1-4825-b8ad-ffb693553773/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.631737 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d09e490b-58f5-43e0-9500-0272f08fc3af" path="/var/lib/kubelet/pods/d09e490b-58f5-43e0-9500-0272f08fc3af/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.632559 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de4b625d-aaf6-497a-b88a-857054ba84e9" path="/var/lib/kubelet/pods/de4b625d-aaf6-497a-b88a-857054ba84e9/volumes" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.636620 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-ddac-account-create-update-bfr58"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.636649 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-ddac-account-create-update-bfr58"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.670577 4863 scope.go:117] "RemoveContainer" containerID="45f9964a635141593c0ecd0b472b9d0197658218a0e29d30f979c3117ddf5090" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.678041 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-lflg4"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.695916 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-lflg4"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.711080 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a811-account-create-update-tt5wt"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.716846 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-a811-account-create-update-tt5wt"] Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.721774 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = 
NotFound desc = container is not created or running: checking if PID of 757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6 is running failed: container process not found" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.747793 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapia811-account-delete-9vntl"] Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.766909 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6 is running failed: container process not found" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.767010 4863 scope.go:117] "RemoveContainer" containerID="0e90f617aa6517298bfd7f2ba20833ae5a734185ffcd027d050e5645935a88c4" Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.768063 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6 is running failed: container process not found" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 07:12:38 crc kubenswrapper[4863]: E1205 07:12:38.768118 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="ef02ff71-0212-4b81-8243-18e2d28b828e" containerName="nova-cell0-conductor-conductor" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.799453 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-xmk5k"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.810556 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-xmk5k"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.820774 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.821550 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0ed59-account-delete-v9rg9"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.835741 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-ed59-account-create-update-wgjh7"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.837331 4863 scope.go:117] "RemoveContainer" containerID="1fb7878f5faa41f5bfe6c62080b32d439375f5649baed85652cf33bd1cdfde23" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.842013 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-ed59-account-create-update-wgjh7"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.852814 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.858393 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.859636 4863 scope.go:117] "RemoveContainer" containerID="a6a3f3263a8ecdd3d1951f70646009a4f53fd5264aa1aae420054b8eeff1e7cd" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.871512 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.876494 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.891055 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.902758 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.910724 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts\") pod \"b5674946-023d-45c0-a0bf-373aa5d7ee65\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.910791 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsfb7\" (UniqueName: \"kubernetes.io/projected/b5674946-023d-45c0-a0bf-373aa5d7ee65-kube-api-access-nsfb7\") pod \"b5674946-023d-45c0-a0bf-373aa5d7ee65\" (UID: \"b5674946-023d-45c0-a0bf-373aa5d7ee65\") " Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.911706 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b5674946-023d-45c0-a0bf-373aa5d7ee65" (UID: "b5674946-023d-45c0-a0bf-373aa5d7ee65"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.913675 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderd676-account-delete-7ks4z"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.920257 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5674946-023d-45c0-a0bf-373aa5d7ee65-kube-api-access-nsfb7" (OuterVolumeSpecName: "kube-api-access-nsfb7") pod "b5674946-023d-45c0-a0bf-373aa5d7ee65" (UID: "b5674946-023d-45c0-a0bf-373aa5d7ee65"). InnerVolumeSpecName "kube-api-access-nsfb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.924711 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinderd676-account-delete-7ks4z"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.944304 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.952613 4863 scope.go:117] "RemoveContainer" containerID="9ee5b5095f60b3fcf7b47a34050adc8fc3929d01b34264ecd58fb01bc25bd387" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.956116 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.964449 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.966555 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.969670 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 07:12:38 crc kubenswrapper[4863]: I1205 07:12:38.971056 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.006940 4863 scope.go:117] "RemoveContainer" containerID="07329e6fbf3ef6acbf027dc83e5cf92b53de3edc0cffa4a41162cabe931ecb30" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012654 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data\") pod \"97b9e3bc-115e-4613-9e5e-4cf44651585e\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012702 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b9e3bc-115e-4613-9e5e-4cf44651585e-logs\") pod \"97b9e3bc-115e-4613-9e5e-4cf44651585e\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012752 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-logs\") pod \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012775 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data-custom\") pod \"97b9e3bc-115e-4613-9e5e-4cf44651585e\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012829 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data-custom\") pod \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012858 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d7n4\" (UniqueName: \"kubernetes.io/projected/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-kube-api-access-9d7n4\") pod \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012879 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-combined-ca-bundle\") pod \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012913 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skb5k\" (UniqueName: \"kubernetes.io/projected/97b9e3bc-115e-4613-9e5e-4cf44651585e-kube-api-access-skb5k\") pod \"97b9e3bc-115e-4613-9e5e-4cf44651585e\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012932 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-combined-ca-bundle\") pod \"97b9e3bc-115e-4613-9e5e-4cf44651585e\" (UID: \"97b9e3bc-115e-4613-9e5e-4cf44651585e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.012951 4863 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data\") pod \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\" (UID: \"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.013173 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-logs" (OuterVolumeSpecName: "logs") pod "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" (UID: "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.013298 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5674946-023d-45c0-a0bf-373aa5d7ee65-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.013316 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsfb7\" (UniqueName: \"kubernetes.io/projected/b5674946-023d-45c0-a0bf-373aa5d7ee65-kube-api-access-nsfb7\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.013330 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.014078 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97b9e3bc-115e-4613-9e5e-4cf44651585e-logs" (OuterVolumeSpecName: "logs") pod "97b9e3bc-115e-4613-9e5e-4cf44651585e" (UID: "97b9e3bc-115e-4613-9e5e-4cf44651585e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.017435 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" (UID: "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.019595 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "97b9e3bc-115e-4613-9e5e-4cf44651585e" (UID: "97b9e3bc-115e-4613-9e5e-4cf44651585e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.019725 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97b9e3bc-115e-4613-9e5e-4cf44651585e-kube-api-access-skb5k" (OuterVolumeSpecName: "kube-api-access-skb5k") pod "97b9e3bc-115e-4613-9e5e-4cf44651585e" (UID: "97b9e3bc-115e-4613-9e5e-4cf44651585e"). InnerVolumeSpecName "kube-api-access-skb5k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.019866 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-kube-api-access-9d7n4" (OuterVolumeSpecName: "kube-api-access-9d7n4") pod "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" (UID: "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6"). InnerVolumeSpecName "kube-api-access-9d7n4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.029110 4863 scope.go:117] "RemoveContainer" containerID="5cba4e7d73b12da2bff1545d1174d69edd005114aec052ce29065a33f442e758" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.042479 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97b9e3bc-115e-4613-9e5e-4cf44651585e" (UID: "97b9e3bc-115e-4613-9e5e-4cf44651585e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.042625 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" (UID: "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.055153 4863 scope.go:117] "RemoveContainer" containerID="52e26abc5b133fb5aeaaf4dccc14824d17b6a85c491f8151e997e9bfef541884" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.065836 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data" (OuterVolumeSpecName: "config-data") pod "97b9e3bc-115e-4613-9e5e-4cf44651585e" (UID: "97b9e3bc-115e-4613-9e5e-4cf44651585e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.087335 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data" (OuterVolumeSpecName: "config-data") pod "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" (UID: "756dee9e-0241-4bf8-b7da-a9c9b4f92ac6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.093333 4863 scope.go:117] "RemoveContainer" containerID="8769e7f2be03e56bacce35b01c9aeb9d54425fe96a1a8359c9c0f01c6d5f1e35" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114432 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b9e3bc-115e-4613-9e5e-4cf44651585e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114463 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114486 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114495 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d7n4\" (UniqueName: \"kubernetes.io/projected/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-kube-api-access-9d7n4\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114504 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114513 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skb5k\" (UniqueName: \"kubernetes.io/projected/97b9e3bc-115e-4613-9e5e-4cf44651585e-kube-api-access-skb5k\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114521 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114528 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.114538 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b9e3bc-115e-4613-9e5e-4cf44651585e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.119788 4863 scope.go:117] "RemoveContainer" containerID="2f4a46d4b57df2d6571ee3d4e278638e04cc4cea822ade66d2b1300772c45f1c" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.148665 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" event={"ID":"756dee9e-0241-4bf8-b7da-a9c9b4f92ac6","Type":"ContainerDied","Data":"efbfb9595668a4664aaebb91a98897ac887662d69a0da20dd984d1c9016179bb"} Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.148761 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-bd7bc7b54-vzjx4" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.152570 4863 generic.go:334] "Generic (PLEG): container finished" podID="ef02ff71-0212-4b81-8243-18e2d28b828e" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" exitCode=0 Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.152709 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ef02ff71-0212-4b81-8243-18e2d28b828e","Type":"ContainerDied","Data":"757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6"} Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.161831 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronadda-account-delete-wpdtm" event={"ID":"b5674946-023d-45c0-a0bf-373aa5d7ee65","Type":"ContainerDied","Data":"21ca6ff77da49d569fe51a361bbf07624043c771f66bff6340425a9b7ddac3f9"} Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.161934 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronadda-account-delete-wpdtm" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.175844 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-594bb7dbb9-862q2" event={"ID":"97b9e3bc-115e-4613-9e5e-4cf44651585e","Type":"ContainerDied","Data":"d7b3af1e9115ebbee604cee0520867054a7f71340e8e9363dad6615f21985057"} Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.176136 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-594bb7dbb9-862q2" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.177112 4863 scope.go:117] "RemoveContainer" containerID="d9b3e236cd29a2fd2e824692a5204ae8a950e3f494022435b0779812ac5c3dd1" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.185347 4863 generic.go:334] "Generic (PLEG): container finished" podID="26d1df4f-5673-4b66-ad39-6da15197ef72" containerID="954c1f5c6657fcb41a451ef64463595a73b0405b651fcdb2833bcc61d54b9090" exitCode=0 Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.185487 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-558b46f87f-4r8fh" event={"ID":"26d1df4f-5673-4b66-ad39-6da15197ef72","Type":"ContainerDied","Data":"954c1f5c6657fcb41a451ef64463595a73b0405b651fcdb2833bcc61d54b9090"} Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.187326 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.187337 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.228131 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.235596 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronadda-account-delete-wpdtm"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.236542 4863 scope.go:117] "RemoveContainer" containerID="021d8eac52f783b7221dafaa63c90c000b5e8040770750c9ddb21dc8b652ebeb" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.274849 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutronadda-account-delete-wpdtm"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.289359 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-bd7bc7b54-vzjx4"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.299530 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-bd7bc7b54-vzjx4"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.316208 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6h9x\" (UniqueName: \"kubernetes.io/projected/7b21df5e-065a-4c62-b271-704c86b97f58-kube-api-access-l6h9x\") pod \"7b21df5e-065a-4c62-b271-704c86b97f58\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.316285 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts\") pod \"7b21df5e-065a-4c62-b271-704c86b97f58\" (UID: \"7b21df5e-065a-4c62-b271-704c86b97f58\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.317039 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7b21df5e-065a-4c62-b271-704c86b97f58" (UID: "7b21df5e-065a-4c62-b271-704c86b97f58"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.319647 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b21df5e-065a-4c62-b271-704c86b97f58-kube-api-access-l6h9x" (OuterVolumeSpecName: "kube-api-access-l6h9x") pod "7b21df5e-065a-4c62-b271-704c86b97f58" (UID: "7b21df5e-065a-4c62-b271-704c86b97f58"). InnerVolumeSpecName "kube-api-access-l6h9x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.320043 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-594bb7dbb9-862q2"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.337621 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-594bb7dbb9-862q2"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.348969 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.364577 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.371742 4863 scope.go:117] "RemoveContainer" containerID="61e8197d30bc11087e459d5b563b08a28f784f5a6f60b721e93bb7a88955158e" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.371892 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.375033 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.379334 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.400697 4863 scope.go:117] "RemoveContainer" containerID="f6d81c1a4f657c6fae55a4c625c5b5be2de6ad49debaa852a7501310e581a784" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.405866 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.417641 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7mx6\" (UniqueName: \"kubernetes.io/projected/81284a21-5f4d-4135-b08e-94415569eb09-kube-api-access-w7mx6\") pod \"81284a21-5f4d-4135-b08e-94415569eb09\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.417714 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts\") pod \"81284a21-5f4d-4135-b08e-94415569eb09\" (UID: \"81284a21-5f4d-4135-b08e-94415569eb09\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.417763 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl4nc\" (UniqueName: \"kubernetes.io/projected/d9d1ac32-bc45-41a0-b696-034ff92b13d4-kube-api-access-pl4nc\") pod \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.417816 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts\") pod \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\" (UID: \"d9d1ac32-bc45-41a0-b696-034ff92b13d4\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.418433 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "81284a21-5f4d-4135-b08e-94415569eb09" (UID: "81284a21-5f4d-4135-b08e-94415569eb09"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.419250 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d9d1ac32-bc45-41a0-b696-034ff92b13d4" (UID: "d9d1ac32-bc45-41a0-b696-034ff92b13d4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.419433 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9d1ac32-bc45-41a0-b696-034ff92b13d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.419451 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6h9x\" (UniqueName: \"kubernetes.io/projected/7b21df5e-065a-4c62-b271-704c86b97f58-kube-api-access-l6h9x\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.419463 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b21df5e-065a-4c62-b271-704c86b97f58-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.419490 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81284a21-5f4d-4135-b08e-94415569eb09-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.439655 4863 scope.go:117] "RemoveContainer" containerID="60b413aacbb400385f318f7b88f361cca235df215614238ffe8edc91b3a7e32f" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.441749 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81284a21-5f4d-4135-b08e-94415569eb09-kube-api-access-w7mx6" (OuterVolumeSpecName: "kube-api-access-w7mx6") pod "81284a21-5f4d-4135-b08e-94415569eb09" (UID: "81284a21-5f4d-4135-b08e-94415569eb09"). InnerVolumeSpecName "kube-api-access-w7mx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.443031 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9d1ac32-bc45-41a0-b696-034ff92b13d4-kube-api-access-pl4nc" (OuterVolumeSpecName: "kube-api-access-pl4nc") pod "d9d1ac32-bc45-41a0-b696-034ff92b13d4" (UID: "d9d1ac32-bc45-41a0-b696-034ff92b13d4"). InnerVolumeSpecName "kube-api-access-pl4nc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.522703 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7mx6\" (UniqueName: \"kubernetes.io/projected/81284a21-5f4d-4135-b08e-94415569eb09-kube-api-access-w7mx6\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.522741 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl4nc\" (UniqueName: \"kubernetes.io/projected/d9d1ac32-bc45-41a0-b696-034ff92b13d4-kube-api-access-pl4nc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.534001 4863 scope.go:117] "RemoveContainer" containerID="c98e614813f2253340a26aa424aeadf3ffe62e568ea900c57f212c1299236d9b" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.575856 4863 scope.go:117] "RemoveContainer" containerID="75550647e4a3589626d3a282bbaa0476e6580c78cc7a3e7a2b87354e2ad9f6a6" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.634347 4863 scope.go:117] "RemoveContainer" containerID="f9cbbd75cf4e441651ca07035330ebfbbf0163c249a21a5f6b8d9986b0edde6a" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.703873 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.711581 4863 scope.go:117] "RemoveContainer" containerID="8c2f71b77923ff40514479e86a2a93d8a6db31c3b4b1aa7bee31460d5cfebb4a" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.743105 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-config-data\") pod \"ef02ff71-0212-4b81-8243-18e2d28b828e\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.743225 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jvmv\" (UniqueName: \"kubernetes.io/projected/ef02ff71-0212-4b81-8243-18e2d28b828e-kube-api-access-4jvmv\") pod \"ef02ff71-0212-4b81-8243-18e2d28b828e\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.743260 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-combined-ca-bundle\") pod \"ef02ff71-0212-4b81-8243-18e2d28b828e\" (UID: \"ef02ff71-0212-4b81-8243-18e2d28b828e\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.749348 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef02ff71-0212-4b81-8243-18e2d28b828e-kube-api-access-4jvmv" (OuterVolumeSpecName: "kube-api-access-4jvmv") pod "ef02ff71-0212-4b81-8243-18e2d28b828e" (UID: "ef02ff71-0212-4b81-8243-18e2d28b828e"). InnerVolumeSpecName "kube-api-access-4jvmv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.762225 4863 scope.go:117] "RemoveContainer" containerID="b604a52142e9d9f908783605b6df24046daf096d058499b941bcd279570ec906" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.762374 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef02ff71-0212-4b81-8243-18e2d28b828e" (UID: "ef02ff71-0212-4b81-8243-18e2d28b828e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.764522 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-config-data" (OuterVolumeSpecName: "config-data") pod "ef02ff71-0212-4b81-8243-18e2d28b828e" (UID: "ef02ff71-0212-4b81-8243-18e2d28b828e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.793878 4863 scope.go:117] "RemoveContainer" containerID="de7ae6772610824286f4a36b1feda8b4ab485b788e3f580e7258992e82f32077" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.820974 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.825482 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.837189 4863 scope.go:117] "RemoveContainer" containerID="2d5bab542db02ef97af154097578dd404d6d1379735eb1ccb0151edc08a521c2" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.844676 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.844712 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jvmv\" (UniqueName: \"kubernetes.io/projected/ef02ff71-0212-4b81-8243-18e2d28b828e-kube-api-access-4jvmv\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.844723 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef02ff71-0212-4b81-8243-18e2d28b828e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945374 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-combined-ca-bundle\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945505 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptfpp\" (UniqueName: \"kubernetes.io/projected/e15a3f00-fce6-490e-9b6b-ca28d8334d25-kube-api-access-ptfpp\") pod \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945545 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" 
(UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-fernet-keys\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945565 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5xq6\" (UniqueName: \"kubernetes.io/projected/26d1df4f-5673-4b66-ad39-6da15197ef72-kube-api-access-d5xq6\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945600 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts\") pod \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\" (UID: \"e15a3f00-fce6-490e-9b6b-ca28d8334d25\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945618 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-internal-tls-certs\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945641 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-credential-keys\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945666 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-scripts\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945686 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-public-tls-certs\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.945749 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-config-data\") pod \"26d1df4f-5673-4b66-ad39-6da15197ef72\" (UID: \"26d1df4f-5673-4b66-ad39-6da15197ef72\") " Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.946100 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e15a3f00-fce6-490e-9b6b-ca28d8334d25" (UID: "e15a3f00-fce6-490e-9b6b-ca28d8334d25"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.948876 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.951057 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.952685 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26d1df4f-5673-4b66-ad39-6da15197ef72-kube-api-access-d5xq6" (OuterVolumeSpecName: "kube-api-access-d5xq6") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "kube-api-access-d5xq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.953018 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e15a3f00-fce6-490e-9b6b-ca28d8334d25-kube-api-access-ptfpp" (OuterVolumeSpecName: "kube-api-access-ptfpp") pod "e15a3f00-fce6-490e-9b6b-ca28d8334d25" (UID: "e15a3f00-fce6-490e-9b6b-ca28d8334d25"). InnerVolumeSpecName "kube-api-access-ptfpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.955741 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-scripts" (OuterVolumeSpecName: "scripts") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.964711 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-config-data" (OuterVolumeSpecName: "config-data") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.965779 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:39 crc kubenswrapper[4863]: I1205 07:12:39.987331 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.008766 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "26d1df4f-5673-4b66-ad39-6da15197ef72" (UID: "26d1df4f-5673-4b66-ad39-6da15197ef72"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047714 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptfpp\" (UniqueName: \"kubernetes.io/projected/e15a3f00-fce6-490e-9b6b-ca28d8334d25-kube-api-access-ptfpp\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047745 4863 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047755 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5xq6\" (UniqueName: \"kubernetes.io/projected/26d1df4f-5673-4b66-ad39-6da15197ef72-kube-api-access-d5xq6\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047763 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e15a3f00-fce6-490e-9b6b-ca28d8334d25-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047772 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047780 4863 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047793 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047802 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047810 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.047818 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26d1df4f-5673-4b66-ad39-6da15197ef72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.217113 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6b9e-account-delete-7nrpl" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.217243 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6b9e-account-delete-7nrpl" event={"ID":"7b21df5e-065a-4c62-b271-704c86b97f58","Type":"ContainerDied","Data":"2158e4fa57c0b26d966997da3d6c0b65ac62c2da4f2fd0ba488e1559cc7c76c0"} Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.217323 4863 scope.go:117] "RemoveContainer" containerID="7bb6b44e29c07ac7ad2848e995dffeb54c63a3e75e8fcb40dd8dabf5b26b9a14" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.221844 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.222079 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ef02ff71-0212-4b81-8243-18e2d28b828e","Type":"ContainerDied","Data":"6e869b05d2334aad77eefe80f30464f62def6965b25799672450931bd1399f54"} Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.229911 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapia811-account-delete-9vntl" event={"ID":"81284a21-5f4d-4135-b08e-94415569eb09","Type":"ContainerDied","Data":"1b171bb6a3fe720e2deaa231c4bcf99892feafb2b9ae09fcb7bbeabaaad7bc07"} Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.230020 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapia811-account-delete-9vntl" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.238913 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0ed59-account-delete-v9rg9" event={"ID":"e15a3f00-fce6-490e-9b6b-ca28d8334d25","Type":"ContainerDied","Data":"4e3e6cba36f5f2ba67fc6d9696a91b34a32f6623e6f389254f47e7679895c09c"} Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.239011 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0ed59-account-delete-v9rg9" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.261001 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceddac-account-delete-hlrff" event={"ID":"d9d1ac32-bc45-41a0-b696-034ff92b13d4","Type":"ContainerDied","Data":"dcc817d82a320bf2049d52cd9804c52e097f76d17d0c31a7c470eb715621ff96"} Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.261095 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glanceddac-account-delete-hlrff" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.267014 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-558b46f87f-4r8fh" event={"ID":"26d1df4f-5673-4b66-ad39-6da15197ef72","Type":"ContainerDied","Data":"747529b6cd19ad9be318c1eca48f2e228e5b26a298b6592e90c170426c5a4944"} Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.267069 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-558b46f87f-4r8fh" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.324229 4863 scope.go:117] "RemoveContainer" containerID="757429a1e03081609ee7c9f84df4c1bca7a4a4e20ce1faa970b0398c974257e6" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.360132 4863 scope.go:117] "RemoveContainer" containerID="e18bc44de06fc508590f1a10ba71eeaed8e5f29bae81bf79ed858ecc31de0d96" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.372823 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0ed59-account-delete-v9rg9"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.388139 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell0ed59-account-delete-v9rg9"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.398606 4863 scope.go:117] "RemoveContainer" containerID="d71782c7a5ffb55ae64289ad77c98d1cb841124b3e5fa434d89d4e25da13e56a" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.410044 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapia811-account-delete-9vntl"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.423571 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapia811-account-delete-9vntl"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.428037 4863 scope.go:117] "RemoveContainer" containerID="fa5daef059bea9be99399c23fdbffecb7ff93208b4415853aee9526be0c253b4" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.436349 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.443425 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.459971 4863 scope.go:117] "RemoveContainer" containerID="954c1f5c6657fcb41a451ef64463595a73b0405b651fcdb2833bcc61d54b9090" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.472653 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican6b9e-account-delete-7nrpl"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.510292 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican6b9e-account-delete-7nrpl"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.522007 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-558b46f87f-4r8fh"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.548562 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-558b46f87f-4r8fh"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.571755 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glanceddac-account-delete-hlrff"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.578823 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glanceddac-account-delete-hlrff"] Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.612863 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0765ebea-20ed-4ada-8031-3871a35e5f11" path="/var/lib/kubelet/pods/0765ebea-20ed-4ada-8031-3871a35e5f11/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.613690 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26d1df4f-5673-4b66-ad39-6da15197ef72" path="/var/lib/kubelet/pods/26d1df4f-5673-4b66-ad39-6da15197ef72/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.614201 4863 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1" path="/var/lib/kubelet/pods/31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.615421 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46586650-4568-4f5e-9854-30f6e0291b6b" path="/var/lib/kubelet/pods/46586650-4568-4f5e-9854-30f6e0291b6b/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.616115 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" path="/var/lib/kubelet/pods/5c1e2892-03e9-4f09-84ce-0c91842108cc/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.616753 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" path="/var/lib/kubelet/pods/756dee9e-0241-4bf8-b7da-a9c9b4f92ac6/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.617829 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" path="/var/lib/kubelet/pods/7b21df5e-065a-4c62-b271-704c86b97f58/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.618328 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81284a21-5f4d-4135-b08e-94415569eb09" path="/var/lib/kubelet/pods/81284a21-5f4d-4135-b08e-94415569eb09/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.618852 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" path="/var/lib/kubelet/pods/8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.619869 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" path="/var/lib/kubelet/pods/97b9e3bc-115e-4613-9e5e-4cf44651585e/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.620375 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76" path="/var/lib/kubelet/pods/99f3d13b-c5bd-4fe3-98de-cf92fc3a5d76/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.620862 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" path="/var/lib/kubelet/pods/b5674946-023d-45c0-a0bf-373aa5d7ee65/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.621746 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb1b0924-31b6-4b28-b187-8615b5e35545" path="/var/lib/kubelet/pods/bb1b0924-31b6-4b28-b187-8615b5e35545/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.622672 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be1d160c-8541-4f16-9897-99aacd346223" path="/var/lib/kubelet/pods/be1d160c-8541-4f16-9897-99aacd346223/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.629513 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" path="/var/lib/kubelet/pods/c908ae8d-4ec2-4938-819c-0ba2ee26f209/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.630221 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf784937-80ca-4588-a6bb-5dd64800c6dd" path="/var/lib/kubelet/pods/cf784937-80ca-4588-a6bb-5dd64800c6dd/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.630846 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="d4703140-cc56-4eb8-b06b-1033916a839f" path="/var/lib/kubelet/pods/d4703140-cc56-4eb8-b06b-1033916a839f/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.631975 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d847c56f-38ef-4aaf-a974-b347f5091038" path="/var/lib/kubelet/pods/d847c56f-38ef-4aaf-a974-b347f5091038/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.632515 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" path="/var/lib/kubelet/pods/d9d1ac32-bc45-41a0-b696-034ff92b13d4/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.633087 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0da7943-3388-478a-bc9d-58c07a9f343d" path="/var/lib/kubelet/pods/e0da7943-3388-478a-bc9d-58c07a9f343d/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.634250 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" path="/var/lib/kubelet/pods/e15a3f00-fce6-490e-9b6b-ca28d8334d25/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: I1205 07:12:40.634829 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef02ff71-0212-4b81-8243-18e2d28b828e" path="/var/lib/kubelet/pods/ef02ff71-0212-4b81-8243-18e2d28b828e/volumes" Dec 05 07:12:40 crc kubenswrapper[4863]: E1205 07:12:40.702760 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 05 07:12:40 crc kubenswrapper[4863]: E1205 07:12:40.704837 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 05 07:12:40 crc kubenswrapper[4863]: E1205 07:12:40.706724 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 05 07:12:40 crc kubenswrapper[4863]: E1205 07:12:40.706801 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerName="galera" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.282568 4863 generic.go:334] "Generic (PLEG): container finished" podID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerID="a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e" exitCode=0 Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.282674 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"29b5ef50-8884-4ebe-bc29-291301e97e69","Type":"ContainerDied","Data":"a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e"} Dec 05 07:12:41 crc 
kubenswrapper[4863]: I1205 07:12:41.288155 4863 generic.go:334] "Generic (PLEG): container finished" podID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerID="8a42fab69785176997b7e2fb38ed78927e9b906b9e9eb6e7ad136bfb756c4118" exitCode=0 Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.289015 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerDied","Data":"8a42fab69785176997b7e2fb38ed78927e9b906b9e9eb6e7ad136bfb756c4118"} Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.712638 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.774380 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.787779 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-config-data\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.787857 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxr99\" (UniqueName: \"kubernetes.io/projected/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-kube-api-access-wxr99\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.787884 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-log-httpd\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.787968 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-run-httpd\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.787993 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-scripts\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.788016 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-combined-ca-bundle\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.788041 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-sg-core-conf-yaml\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.788054 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-ceilometer-tls-certs\") pod \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\" (UID: \"0ebeed61-b530-43f8-bb15-5e42fa95f1b9\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.789121 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.792378 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.795174 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-scripts" (OuterVolumeSpecName: "scripts") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.795327 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-kube-api-access-wxr99" (OuterVolumeSpecName: "kube-api-access-wxr99") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "kube-api-access-wxr99". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.815510 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.825714 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.854417 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.874625 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-config-data" (OuterVolumeSpecName: "config-data") pod "0ebeed61-b530-43f8-bb15-5e42fa95f1b9" (UID: "0ebeed61-b530-43f8-bb15-5e42fa95f1b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.888985 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889045 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-operator-scripts\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889088 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-generated\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889115 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-default\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889154 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-combined-ca-bundle\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889174 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-kolla-config\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889227 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz2t9\" (UniqueName: \"kubernetes.io/projected/29b5ef50-8884-4ebe-bc29-291301e97e69-kube-api-access-qz2t9\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889267 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-galera-tls-certs\") pod \"29b5ef50-8884-4ebe-bc29-291301e97e69\" (UID: \"29b5ef50-8884-4ebe-bc29-291301e97e69\") " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889517 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-run-httpd\") on node 
\"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889527 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889535 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889545 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889554 4863 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889561 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889571 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxr99\" (UniqueName: \"kubernetes.io/projected/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-kube-api-access-wxr99\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.889579 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ebeed61-b530-43f8-bb15-5e42fa95f1b9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.890017 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.890115 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.890196 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.890426 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.894977 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29b5ef50-8884-4ebe-bc29-291301e97e69-kube-api-access-qz2t9" (OuterVolumeSpecName: "kube-api-access-qz2t9") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "kube-api-access-qz2t9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.899103 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.911585 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.938679 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "29b5ef50-8884-4ebe-bc29-291301e97e69" (UID: "29b5ef50-8884-4ebe-bc29-291301e97e69"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991142 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991184 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991199 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991208 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991216 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991225 4863 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29b5ef50-8884-4ebe-bc29-291301e97e69-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991233 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz2t9\" (UniqueName: \"kubernetes.io/projected/29b5ef50-8884-4ebe-bc29-291301e97e69-kube-api-access-qz2t9\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:41 crc kubenswrapper[4863]: I1205 07:12:41.991244 4863 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b5ef50-8884-4ebe-bc29-291301e97e69-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.007326 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.092370 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.187141 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.187651 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" 
containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.188077 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.188151 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.189100 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.190461 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.191895 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:42 crc kubenswrapper[4863]: E1205 07:12:42.191924 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.304077 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"29b5ef50-8884-4ebe-bc29-291301e97e69","Type":"ContainerDied","Data":"f066c8d9083a4acb8d75e89ac76a6ce5908b02b33e9ed9fe3a21677fd86c7231"} Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.304130 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.304335 4863 scope.go:117] "RemoveContainer" containerID="a1c519f1d2d8171b4c30a529c6d8dcda21d2d5ef38b264cd40230650ac4f3e4e" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.307952 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ebeed61-b530-43f8-bb15-5e42fa95f1b9","Type":"ContainerDied","Data":"f7447b3f181ff6f96724223418dfac1b61ec5efa59e6634b16ec966894960cf0"} Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.308095 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.333793 4863 scope.go:117] "RemoveContainer" containerID="6e3a0222b6cd3c3e6e14d0553d74c6df24539ef40f69b913d38c882123e05175" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.352393 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.366221 4863 scope.go:117] "RemoveContainer" containerID="7550e24c5734f210c7db5088ea7f7de89cf17da52278a4790f7d7af94780b9ee" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.374200 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.380041 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.386293 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.398832 4863 scope.go:117] "RemoveContainer" containerID="317ef2eee14e624241ec945a7fdc4c1afe943f1965f78feb5dc586e82047769c" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.416944 4863 scope.go:117] "RemoveContainer" containerID="8a42fab69785176997b7e2fb38ed78927e9b906b9e9eb6e7ad136bfb756c4118" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.432030 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/memcached-0" podUID="d4703140-cc56-4eb8-b06b-1033916a839f" containerName="memcached" probeResult="failure" output="dial tcp 10.217.0.103:11211: i/o timeout" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.437165 4863 scope.go:117] "RemoveContainer" containerID="faa93203b4e74ae18536fcc22dae3d5e667b4f4cb6230251e992eb4cbb666258" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.610948 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" path="/var/lib/kubelet/pods/0ebeed61-b530-43f8-bb15-5e42fa95f1b9/volumes" Dec 05 07:12:42 crc kubenswrapper[4863]: I1205 07:12:42.611903 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" path="/var/lib/kubelet/pods/29b5ef50-8884-4ebe-bc29-291301e97e69/volumes" Dec 05 07:12:43 crc kubenswrapper[4863]: I1205 07:12:43.602725 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:12:43 crc kubenswrapper[4863]: E1205 07:12:43.603806 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.186500 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.187195 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.187511 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.187557 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.189912 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.191300 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.192920 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:47 crc kubenswrapper[4863]: E1205 07:12:47.192958 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.051773 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.200398 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnnb7\" (UniqueName: \"kubernetes.io/projected/fcb2529a-46f2-4b17-bb95-8ef2a119f222-kube-api-access-mnnb7\") pod \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.200455 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-config\") pod \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.200532 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-ovndb-tls-certs\") pod \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.200607 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-public-tls-certs\") pod \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.200648 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-combined-ca-bundle\") pod \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.200667 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-httpd-config\") pod \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.200692 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-internal-tls-certs\") pod \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\" (UID: \"fcb2529a-46f2-4b17-bb95-8ef2a119f222\") " Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.205762 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "fcb2529a-46f2-4b17-bb95-8ef2a119f222" (UID: "fcb2529a-46f2-4b17-bb95-8ef2a119f222"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.207322 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcb2529a-46f2-4b17-bb95-8ef2a119f222-kube-api-access-mnnb7" (OuterVolumeSpecName: "kube-api-access-mnnb7") pod "fcb2529a-46f2-4b17-bb95-8ef2a119f222" (UID: "fcb2529a-46f2-4b17-bb95-8ef2a119f222"). InnerVolumeSpecName "kube-api-access-mnnb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.237307 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fcb2529a-46f2-4b17-bb95-8ef2a119f222" (UID: "fcb2529a-46f2-4b17-bb95-8ef2a119f222"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.252341 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcb2529a-46f2-4b17-bb95-8ef2a119f222" (UID: "fcb2529a-46f2-4b17-bb95-8ef2a119f222"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.255643 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fcb2529a-46f2-4b17-bb95-8ef2a119f222" (UID: "fcb2529a-46f2-4b17-bb95-8ef2a119f222"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.265632 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-config" (OuterVolumeSpecName: "config") pod "fcb2529a-46f2-4b17-bb95-8ef2a119f222" (UID: "fcb2529a-46f2-4b17-bb95-8ef2a119f222"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.276761 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "fcb2529a-46f2-4b17-bb95-8ef2a119f222" (UID: "fcb2529a-46f2-4b17-bb95-8ef2a119f222"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.302869 4863 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.302903 4863 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.302915 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.302927 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.302937 4863 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.302949 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnnb7\" (UniqueName: \"kubernetes.io/projected/fcb2529a-46f2-4b17-bb95-8ef2a119f222-kube-api-access-mnnb7\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.302963 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/fcb2529a-46f2-4b17-bb95-8ef2a119f222-config\") on node \"crc\" DevicePath \"\"" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.371506 4863 generic.go:334] "Generic (PLEG): container finished" podID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerID="3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4" exitCode=0 Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.371580 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-659b8866f7-wfh8q" event={"ID":"fcb2529a-46f2-4b17-bb95-8ef2a119f222","Type":"ContainerDied","Data":"3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4"} Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.371637 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-659b8866f7-wfh8q" event={"ID":"fcb2529a-46f2-4b17-bb95-8ef2a119f222","Type":"ContainerDied","Data":"637fd1fd417d45f1092909a3ff9c5c60d4cb1467c19a4499dd3b3e1386a038b2"} Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.371674 4863 scope.go:117] "RemoveContainer" containerID="026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.371894 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-659b8866f7-wfh8q" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.402866 4863 scope.go:117] "RemoveContainer" containerID="3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.415219 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-659b8866f7-wfh8q"] Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.422630 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-659b8866f7-wfh8q"] Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.435571 4863 scope.go:117] "RemoveContainer" containerID="026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99" Dec 05 07:12:48 crc kubenswrapper[4863]: E1205 07:12:48.435938 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99\": container with ID starting with 026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99 not found: ID does not exist" containerID="026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.435973 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99"} err="failed to get container status \"026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99\": rpc error: code = NotFound desc = could not find container \"026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99\": container with ID starting with 026de01b456fffc9f78b1b9192f726ad47d449fb1144e30809aa7daa26643a99 not found: ID does not exist" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.436000 4863 scope.go:117] "RemoveContainer" containerID="3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4" Dec 05 07:12:48 crc kubenswrapper[4863]: E1205 07:12:48.436380 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4\": container with ID starting with 3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4 not found: ID does not exist" containerID="3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.436412 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4"} err="failed to get container status \"3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4\": rpc error: code = NotFound desc = could not find container \"3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4\": container with ID starting with 3d4eb11837d9c82da123b9f7cf05ce1db6c77875320307c7ea9303a6e046aca4 not found: ID does not exist" Dec 05 07:12:48 crc kubenswrapper[4863]: I1205 07:12:48.625833 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" path="/var/lib/kubelet/pods/fcb2529a-46f2-4b17-bb95-8ef2a119f222/volumes" Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.186359 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.187025 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.187558 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.187600 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.188406 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.190321 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.192029 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:52 crc kubenswrapper[4863]: E1205 07:12:52.192092 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:12:55 crc kubenswrapper[4863]: I1205 07:12:55.601375 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:12:55 crc kubenswrapper[4863]: E1205 07:12:55.602141 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.186329 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.187341 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.187691 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.187738 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.188001 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.190150 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.194093 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 07:12:57 crc kubenswrapper[4863]: E1205 07:12:57.194397 4863 prober.go:104] "Probe errored" err="rpc 
error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-nsmzq" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:12:59 crc kubenswrapper[4863]: I1205 07:12:59.921903 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.068219 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"11854f2b-ca24-48c8-b33b-60558484ea0a\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.068277 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-cache\") pod \"11854f2b-ca24-48c8-b33b-60558484ea0a\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.068335 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-lock\") pod \"11854f2b-ca24-48c8-b33b-60558484ea0a\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.068411 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") pod \"11854f2b-ca24-48c8-b33b-60558484ea0a\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.068483 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvrvt\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-kube-api-access-xvrvt\") pod \"11854f2b-ca24-48c8-b33b-60558484ea0a\" (UID: \"11854f2b-ca24-48c8-b33b-60558484ea0a\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.069077 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-cache" (OuterVolumeSpecName: "cache") pod "11854f2b-ca24-48c8-b33b-60558484ea0a" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.069179 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-lock" (OuterVolumeSpecName: "lock") pod "11854f2b-ca24-48c8-b33b-60558484ea0a" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.075725 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "11854f2b-ca24-48c8-b33b-60558484ea0a" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.090095 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-kube-api-access-xvrvt" (OuterVolumeSpecName: "kube-api-access-xvrvt") pod "11854f2b-ca24-48c8-b33b-60558484ea0a" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a"). InnerVolumeSpecName "kube-api-access-xvrvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.091409 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "swift") pod "11854f2b-ca24-48c8-b33b-60558484ea0a" (UID: "11854f2b-ca24-48c8-b33b-60558484ea0a"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.157897 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-nsmzq_3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3/ovs-vswitchd/0.log" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.158903 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.170224 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.170253 4863 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-cache\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.170263 4863 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/11854f2b-ca24-48c8-b33b-60558484ea0a-lock\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.170274 4863 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.170286 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvrvt\" (UniqueName: \"kubernetes.io/projected/11854f2b-ca24-48c8-b33b-60558484ea0a-kube-api-access-xvrvt\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.187588 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271017 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-run\") pod \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271107 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkzm5\" (UniqueName: \"kubernetes.io/projected/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-kube-api-access-mkzm5\") pod \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\" (UID: 
\"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271141 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-lib\") pod \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271173 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-log\") pod \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271256 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-scripts\") pod \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271375 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-etc-ovs\") pod \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\" (UID: \"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3\") " Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271553 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-lib" (OuterVolumeSpecName: "var-lib") pod "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" (UID: "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271618 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-log" (OuterVolumeSpecName: "var-log") pod "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" (UID: "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271643 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" (UID: "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.271646 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-run" (OuterVolumeSpecName: "var-run") pod "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" (UID: "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.272026 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.272050 4863 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-etc-ovs\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.272063 4863 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.272077 4863 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-lib\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.272087 4863 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.273157 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-scripts" (OuterVolumeSpecName: "scripts") pod "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" (UID: "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.277737 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-kube-api-access-mkzm5" (OuterVolumeSpecName: "kube-api-access-mkzm5") pod "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" (UID: "3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3"). InnerVolumeSpecName "kube-api-access-mkzm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.373589 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkzm5\" (UniqueName: \"kubernetes.io/projected/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-kube-api-access-mkzm5\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.373623 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.485655 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-nsmzq_3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3/ovs-vswitchd/0.log" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.487026 4863 generic.go:334] "Generic (PLEG): container finished" podID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" exitCode=137 Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.487112 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-nsmzq" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.487146 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nsmzq" event={"ID":"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3","Type":"ContainerDied","Data":"7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8"} Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.487209 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nsmzq" event={"ID":"3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3","Type":"ContainerDied","Data":"87b6dd9bf2b89af8f45d97fe953d13eb7f3d9538772189c699d7da1d9df7d34a"} Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.487248 4863 scope.go:117] "RemoveContainer" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.497639 4863 generic.go:334] "Generic (PLEG): container finished" podID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerID="e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd" exitCode=137 Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.497691 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd"} Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.497731 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"11854f2b-ca24-48c8-b33b-60558484ea0a","Type":"ContainerDied","Data":"cf85fe32cab18eb5e4b91c42dac04e58c9cbb1ecd3510138556917bad77673ea"} Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.497807 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.534604 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-nsmzq"] Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.542848 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-nsmzq"] Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.580916 4863 scope.go:117] "RemoveContainer" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.614172 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" path="/var/lib/kubelet/pods/3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3/volumes" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.638581 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.644246 4863 scope.go:117] "RemoveContainer" containerID="10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.647703 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.675217 4863 scope.go:117] "RemoveContainer" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.675719 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8\": container with ID starting with 7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8 not found: ID does not exist" containerID="7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.675767 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8"} err="failed to get container status \"7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8\": rpc error: code = NotFound desc = could not find container \"7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8\": container with ID starting with 7ffab8e1d5138c8a40832e8045cebf4da77112b7bcf2b7347d9307f99b5601c8 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.675795 4863 scope.go:117] "RemoveContainer" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.676254 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150\": container with ID starting with 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 not found: ID does not exist" containerID="4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.676297 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150"} err="failed to get container status \"4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150\": rpc error: code = NotFound desc = could not find 
container \"4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150\": container with ID starting with 4d57b186282060c612d918b08cf4d589ca67806466633fe1e05bb201f5a7a150 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.676345 4863 scope.go:117] "RemoveContainer" containerID="10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.676776 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d\": container with ID starting with 10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d not found: ID does not exist" containerID="10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.676817 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d"} err="failed to get container status \"10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d\": rpc error: code = NotFound desc = could not find container \"10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d\": container with ID starting with 10d22ef0eb4bfad044a2e3de8194886ced6a21642c573e5eca043d702be52c7d not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.676845 4863 scope.go:117] "RemoveContainer" containerID="e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.699243 4863 scope.go:117] "RemoveContainer" containerID="17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.718189 4863 scope.go:117] "RemoveContainer" containerID="84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.736437 4863 scope.go:117] "RemoveContainer" containerID="e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.757121 4863 scope.go:117] "RemoveContainer" containerID="471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.776689 4863 scope.go:117] "RemoveContainer" containerID="eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.797598 4863 scope.go:117] "RemoveContainer" containerID="28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.818247 4863 scope.go:117] "RemoveContainer" containerID="f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.835928 4863 scope.go:117] "RemoveContainer" containerID="25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.854988 4863 scope.go:117] "RemoveContainer" containerID="98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.881684 4863 scope.go:117] "RemoveContainer" containerID="25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.902574 4863 scope.go:117] "RemoveContainer" 
containerID="0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.921040 4863 scope.go:117] "RemoveContainer" containerID="216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.940109 4863 scope.go:117] "RemoveContainer" containerID="24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.959570 4863 scope.go:117] "RemoveContainer" containerID="cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.982956 4863 scope.go:117] "RemoveContainer" containerID="e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.983536 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd\": container with ID starting with e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd not found: ID does not exist" containerID="e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.983609 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd"} err="failed to get container status \"e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd\": rpc error: code = NotFound desc = could not find container \"e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd\": container with ID starting with e234e98a522a3e92866abfa68f52cbd5eba3efee884b60b1133110b50d119bdd not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.983636 4863 scope.go:117] "RemoveContainer" containerID="17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.984134 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5\": container with ID starting with 17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5 not found: ID does not exist" containerID="17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.984192 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5"} err="failed to get container status \"17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5\": rpc error: code = NotFound desc = could not find container \"17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5\": container with ID starting with 17211a308c4f7142797782bd24c9591a1a7e240c4bcb0ee77b81760db7c801c5 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.984225 4863 scope.go:117] "RemoveContainer" containerID="84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.984573 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a\": container with ID starting 
with 84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a not found: ID does not exist" containerID="84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.984608 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a"} err="failed to get container status \"84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a\": rpc error: code = NotFound desc = could not find container \"84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a\": container with ID starting with 84841c7ce4eb2ddaa897e8b3719f84ca92f7e3b1ec6898fdd08cfe8374de7a7a not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.984629 4863 scope.go:117] "RemoveContainer" containerID="e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.984912 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde\": container with ID starting with e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde not found: ID does not exist" containerID="e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.984939 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde"} err="failed to get container status \"e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde\": rpc error: code = NotFound desc = could not find container \"e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde\": container with ID starting with e31bfa2100e68d364a056a124d4630ff0a6da2326ce27d1e3b96463a46109fde not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.984954 4863 scope.go:117] "RemoveContainer" containerID="471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.985504 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab\": container with ID starting with 471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab not found: ID does not exist" containerID="471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.985536 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab"} err="failed to get container status \"471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab\": rpc error: code = NotFound desc = could not find container \"471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab\": container with ID starting with 471d26acf303b9d2c96e80261f04514e903cff90f2ac56898a0534c22c49c1ab not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.985557 4863 scope.go:117] "RemoveContainer" containerID="eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.986083 4863 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311\": container with ID starting with eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311 not found: ID does not exist" containerID="eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.986108 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311"} err="failed to get container status \"eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311\": rpc error: code = NotFound desc = could not find container \"eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311\": container with ID starting with eadd012482475f2739bb023d989032bc0de647a75d8e29574659fb5f4be8d311 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.986145 4863 scope.go:117] "RemoveContainer" containerID="28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.986464 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164\": container with ID starting with 28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164 not found: ID does not exist" containerID="28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.986520 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164"} err="failed to get container status \"28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164\": rpc error: code = NotFound desc = could not find container \"28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164\": container with ID starting with 28826241675b277fb519ec6809532587bcafcccff708f1941f508778d3afb164 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.986541 4863 scope.go:117] "RemoveContainer" containerID="f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.986979 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc\": container with ID starting with f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc not found: ID does not exist" containerID="f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.987004 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc"} err="failed to get container status \"f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc\": rpc error: code = NotFound desc = could not find container \"f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc\": container with ID starting with f75bfcb74b1f66fd78df376902639e1fcf2a37d04833406eb9333bfeb197ccbc not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.987020 4863 scope.go:117] "RemoveContainer" 
containerID="25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.987326 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba\": container with ID starting with 25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba not found: ID does not exist" containerID="25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.987359 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba"} err="failed to get container status \"25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba\": rpc error: code = NotFound desc = could not find container \"25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba\": container with ID starting with 25ef8f0648f138b06a24f9a90e3f15101db332d9dd3943fe3701d1b702f6c2ba not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.987378 4863 scope.go:117] "RemoveContainer" containerID="98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.987659 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468\": container with ID starting with 98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468 not found: ID does not exist" containerID="98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.987681 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468"} err="failed to get container status \"98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468\": rpc error: code = NotFound desc = could not find container \"98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468\": container with ID starting with 98674a39589e7a18057aa768ec07d4af3686a22cf90ee633d7508b154ce83468 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.987696 4863 scope.go:117] "RemoveContainer" containerID="25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.988833 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7\": container with ID starting with 25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7 not found: ID does not exist" containerID="25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.988869 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7"} err="failed to get container status \"25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7\": rpc error: code = NotFound desc = could not find container \"25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7\": container with ID starting with 
25665ffc1200e62011c1e0b3f9cd866ff5e04e1d646e69cbb5200adf7b8f0df7 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.988892 4863 scope.go:117] "RemoveContainer" containerID="0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.989179 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354\": container with ID starting with 0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354 not found: ID does not exist" containerID="0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.989206 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354"} err="failed to get container status \"0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354\": rpc error: code = NotFound desc = could not find container \"0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354\": container with ID starting with 0bc55fd0b9c1eab1bf9f3f0f36fb72c0921214874b8fa6290474e371310f4354 not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.989222 4863 scope.go:117] "RemoveContainer" containerID="216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.989660 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa\": container with ID starting with 216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa not found: ID does not exist" containerID="216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.989689 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa"} err="failed to get container status \"216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa\": rpc error: code = NotFound desc = could not find container \"216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa\": container with ID starting with 216a4fba9882c6925d97a1212de9dcdb809130d194c6a4e1ffc71dd8c1c393fa not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.989707 4863 scope.go:117] "RemoveContainer" containerID="24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.990037 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe\": container with ID starting with 24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe not found: ID does not exist" containerID="24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.990061 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe"} err="failed to get container status \"24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe\": rpc 
error: code = NotFound desc = could not find container \"24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe\": container with ID starting with 24ddf24b904493e3bcbe02c6c8073a81966140a470f3f849cb3778b964813abe not found: ID does not exist" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.990081 4863 scope.go:117] "RemoveContainer" containerID="cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3" Dec 05 07:13:00 crc kubenswrapper[4863]: E1205 07:13:00.990624 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3\": container with ID starting with cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3 not found: ID does not exist" containerID="cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3" Dec 05 07:13:00 crc kubenswrapper[4863]: I1205 07:13:00.990656 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3"} err="failed to get container status \"cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3\": rpc error: code = NotFound desc = could not find container \"cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3\": container with ID starting with cb9b400a0dd80ec6c2684f7fbed2e44053e51b7a87ea459a2bd8b9624d4beaf3 not found: ID does not exist" Dec 05 07:13:02 crc kubenswrapper[4863]: I1205 07:13:02.610695 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" path="/var/lib/kubelet/pods/11854f2b-ca24-48c8-b33b-60558484ea0a/volumes" Dec 05 07:13:08 crc kubenswrapper[4863]: I1205 07:13:08.602986 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:13:08 crc kubenswrapper[4863]: E1205 07:13:08.603654 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:13:21 crc kubenswrapper[4863]: I1205 07:13:21.602120 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:13:21 crc kubenswrapper[4863]: E1205 07:13:21.604392 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:13:22 crc kubenswrapper[4863]: I1205 07:13:22.340095 4863 scope.go:117] "RemoveContainer" containerID="61c0a380e2dcf0cd0908947582893330d846cc7902c15cbf37c53788715964ad" Dec 05 07:13:22 crc kubenswrapper[4863]: I1205 07:13:22.376171 4863 scope.go:117] "RemoveContainer" containerID="d4bdd29e9c370627b90ad1916b3bd8db2227c44957a9ebe1de4ac4bdd4fdd598" Dec 05 07:13:22 crc kubenswrapper[4863]: I1205 07:13:22.406002 4863 scope.go:117] "RemoveContainer" 
containerID="67b664271f3c16430b1157cb096916835bab111073f0c961a5118d4c48f0fc36" Dec 05 07:13:22 crc kubenswrapper[4863]: I1205 07:13:22.433579 4863 scope.go:117] "RemoveContainer" containerID="0f4674259baf4f8b292a83c997e5c4ccc4565c8b2862c6ae5097364bd76fe33e" Dec 05 07:13:22 crc kubenswrapper[4863]: I1205 07:13:22.454905 4863 scope.go:117] "RemoveContainer" containerID="f1827d0de3c80fccbdf17a0694bc33df087fbbc30925674d4a91699101340d23" Dec 05 07:13:22 crc kubenswrapper[4863]: I1205 07:13:22.493309 4863 scope.go:117] "RemoveContainer" containerID="c3c05af524778d23854dd79aaf03a41bacf2d449d8d6e8cc2bbf153ace4c85eb" Dec 05 07:13:22 crc kubenswrapper[4863]: I1205 07:13:22.515901 4863 scope.go:117] "RemoveContainer" containerID="b0392e056cac39b351dc847a4b040f35dd6fedf1fc8850d8c1afc5e276ae80dd" Dec 05 07:13:34 crc kubenswrapper[4863]: I1205 07:13:34.602693 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:13:34 crc kubenswrapper[4863]: E1205 07:13:34.603458 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:13:46 crc kubenswrapper[4863]: I1205 07:13:46.602183 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:13:46 crc kubenswrapper[4863]: E1205 07:13:46.603248 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:13:58 crc kubenswrapper[4863]: I1205 07:13:58.603567 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:13:58 crc kubenswrapper[4863]: E1205 07:13:58.606623 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:14:09 crc kubenswrapper[4863]: I1205 07:14:09.602136 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:14:09 crc kubenswrapper[4863]: E1205 07:14:09.603573 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:14:21 crc kubenswrapper[4863]: I1205 07:14:21.602503 4863 
scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:14:21 crc kubenswrapper[4863]: E1205 07:14:21.606646 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.165020 4863 scope.go:117] "RemoveContainer" containerID="2e41facbcb2249f288be83cb87bf2c0afc4974060007e5ece212e32278aa6b28" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.204721 4863 scope.go:117] "RemoveContainer" containerID="4e621dd30b80605ad9f283015de3b8a62f9df034896f0c16defc57d09b73ac8b" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.234033 4863 scope.go:117] "RemoveContainer" containerID="915787baed9210df0b3a0b85372a927e984fffca3f90aa2607803608cc79af28" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.279753 4863 scope.go:117] "RemoveContainer" containerID="87c7fad0a7fa0245af49dd158e6d71d4d21f77fa1b4efce62c830b18c18b21c2" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.308410 4863 scope.go:117] "RemoveContainer" containerID="76e2426791e1914af8754c91af32546638412eadaf31f056ae68063c6665734a" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.335509 4863 scope.go:117] "RemoveContainer" containerID="7d926cefd2b8f2782d82e3be6c2ab3724e1e4e1d45592a80014e9eb9c83211c5" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.366400 4863 scope.go:117] "RemoveContainer" containerID="fafe7909477cff71de085fadb4309c5acf55a0ecae93af65a21c8a8035c62191" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.406124 4863 scope.go:117] "RemoveContainer" containerID="e819614128145f00ec465bc6c91fbc37d32c0edf71454158f564f3b502d8e840" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.428058 4863 scope.go:117] "RemoveContainer" containerID="e228beda85ec5d3d08fcbd997d0c278dbb12848353920c55e183080870ee1094" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.450962 4863 scope.go:117] "RemoveContainer" containerID="36c4112304fcce868336454395c84be64360e8c50b269d0f5e8ce5d1e343c821" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.489728 4863 scope.go:117] "RemoveContainer" containerID="1c1a98b157d189ecbf275f559664f14b9b9c6e413f9b98bcc9cd673dbf5bdb22" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.521387 4863 scope.go:117] "RemoveContainer" containerID="287b7156913a5e1577175c3d7d4cdc1cb429b1b78e1d70d44014f06595459e29" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.566350 4863 scope.go:117] "RemoveContainer" containerID="9435f9db1ba045d9fc2cc4233222d9272b7c1a41bdd6a6478bbbfa56c7ac0d85" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.588494 4863 scope.go:117] "RemoveContainer" containerID="36eecc4422707dcc6d7f6c623f47769c1397206825994bc4f163e5856994d03b" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.620325 4863 scope.go:117] "RemoveContainer" containerID="38f5b53c0ea567de2f8857aaa96ae6751e0cd07901f4dfd6bb2131fde7c572fe" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.646417 4863 scope.go:117] "RemoveContainer" containerID="f25acbcdd0eb63080dbe931f5c4c38e2eb4c0c55d53e57c13682265c95473515" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.684227 4863 scope.go:117] 
"RemoveContainer" containerID="30ce94e4e8fe29c9f76e65496a2a56a9a11c5957f2156dbc83e7b40c0fadd4f0" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.706739 4863 scope.go:117] "RemoveContainer" containerID="a1fc455ac74128a3780203bb3d350804c33addd1bc64663641bd68d9213de7ae" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.723747 4863 scope.go:117] "RemoveContainer" containerID="e1649004a00070ec01f90918299cc6fa83e3d560abfe1e90693aa20d4579e4d5" Dec 05 07:14:23 crc kubenswrapper[4863]: I1205 07:14:23.747251 4863 scope.go:117] "RemoveContainer" containerID="867f7cf9e88b1eb78b8dadfb5286d67bf858706a6a5792e5b1e2b0cc60117820" Dec 05 07:14:35 crc kubenswrapper[4863]: I1205 07:14:35.602990 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:14:35 crc kubenswrapper[4863]: E1205 07:14:35.605899 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:14:48 crc kubenswrapper[4863]: I1205 07:14:48.602013 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:14:48 crc kubenswrapper[4863]: E1205 07:14:48.603299 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.151666 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw"] Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152596 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152610 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152626 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152632 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152695 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="ovsdbserver-nb" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152703 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="ovsdbserver-nb" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152710 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" 
containerName="ovn-northd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152717 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="ovn-northd" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152724 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d847c56f-38ef-4aaf-a974-b347f5091038" containerName="nova-scheduler-scheduler" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152730 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d847c56f-38ef-4aaf-a974-b347f5091038" containerName="nova-scheduler-scheduler" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152740 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152766 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152777 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152783 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-api" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152794 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4703140-cc56-4eb8-b06b-1033916a839f" containerName="memcached" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152800 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4703140-cc56-4eb8-b06b-1033916a839f" containerName="memcached" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152808 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd957476-007c-4882-8449-96deebe6a63c" containerName="galera" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152816 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd957476-007c-4882-8449-96deebe6a63c" containerName="galera" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152856 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152864 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152873 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerName="setup-container" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152881 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerName="setup-container" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152895 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server-init" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152901 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server-init" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152937 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" 
containerName="galera" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152944 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerName="galera" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152953 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152958 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152966 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef02ff71-0212-4b81-8243-18e2d28b828e" containerName="nova-cell0-conductor-conductor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.152971 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef02ff71-0212-4b81-8243-18e2d28b828e" containerName="nova-cell0-conductor-conductor" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.152979 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153005 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-server" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153016 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153022 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153032 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153038 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153048 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="rsync" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153054 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="rsync" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153093 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153103 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153117 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79d93942-6ec7-4fea-9e05-a9c831ad3dd3" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153125 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="79d93942-6ec7-4fea-9e05-a9c831ad3dd3" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153138 4863 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="46586650-4568-4f5e-9854-30f6e0291b6b" containerName="setup-container" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153145 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="46586650-4568-4f5e-9854-30f6e0291b6b" containerName="setup-container" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153176 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="753801f8-f439-415b-9674-08d58e53def8" containerName="init" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153183 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="753801f8-f439-415b-9674-08d58e53def8" containerName="init" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153199 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" containerName="ovn-controller" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153208 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" containerName="ovn-controller" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153220 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153250 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153264 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-updater" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153270 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-updater" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153279 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-updater" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153284 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-updater" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153293 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerName="mysql-bootstrap" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153299 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerName="mysql-bootstrap" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153328 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-central-agent" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153336 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-central-agent" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153343 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerName="rabbitmq" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153350 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerName="rabbitmq" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153359 4863 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="sg-core" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153364 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="sg-core" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153371 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-reaper" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153378 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-reaper" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153408 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153415 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153424 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26d1df4f-5673-4b66-ad39-6da15197ef72" containerName="keystone-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153430 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="26d1df4f-5673-4b66-ad39-6da15197ef72" containerName="keystone-api" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153439 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153444 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153453 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153459 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153555 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153563 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-api" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153573 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153580 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153587 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" containerName="nova-cell1-conductor-conductor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153593 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" containerName="nova-cell1-conductor-conductor" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153601 4863 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153606 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153635 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153642 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153652 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153658 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153666 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153672 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153684 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="ovsdbserver-sb" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153689 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="ovsdbserver-sb" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153719 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153725 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153735 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153741 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153751 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd957476-007c-4882-8449-96deebe6a63c" containerName="mysql-bootstrap" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153756 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd957476-007c-4882-8449-96deebe6a63c" containerName="mysql-bootstrap" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153768 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-expirer" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153791 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-expirer" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153801 4863 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc4802ae-16f2-4b9e-a153-b48e9c8325b8" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153807 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc4802ae-16f2-4b9e-a153-b48e9c8325b8" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153813 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153820 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153827 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81284a21-5f4d-4135-b08e-94415569eb09" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153833 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="81284a21-5f4d-4135-b08e-94415569eb09" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153840 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153845 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153872 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153879 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153890 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="753801f8-f439-415b-9674-08d58e53def8" containerName="dnsmasq-dns" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153896 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="753801f8-f439-415b-9674-08d58e53def8" containerName="dnsmasq-dns" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153903 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153908 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153915 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46586650-4568-4f5e-9854-30f6e0291b6b" containerName="rabbitmq" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153920 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="46586650-4568-4f5e-9854-30f6e0291b6b" containerName="rabbitmq" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153926 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afae6292-c1df-4dd3-abec-d1f493c03857" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153950 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="afae6292-c1df-4dd3-abec-d1f493c03857" 
containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153960 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="cinder-scheduler" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153966 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="cinder-scheduler" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153976 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153982 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.153991 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.153996 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154006 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0765ebea-20ed-4ada-8031-3871a35e5f11" containerName="kube-state-metrics" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154121 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0765ebea-20ed-4ada-8031-3871a35e5f11" containerName="kube-state-metrics" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154131 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="proxy-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154136 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="proxy-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154143 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154148 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-server" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154156 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154161 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-server" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154171 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154196 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-server" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154206 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154213 4863 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154222 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154229 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-api" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154239 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154245 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154273 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154280 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154290 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154296 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154303 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154309 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154317 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154323 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154352 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="swift-recon-cron" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154359 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="swift-recon-cron" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154366 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154372 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154381 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81284a21-5f4d-4135-b08e-94415569eb09" 
containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154386 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="81284a21-5f4d-4135-b08e-94415569eb09" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154394 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154400 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154425 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154432 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154439 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-metadata" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154445 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-metadata" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154455 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154460 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154493 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="probe" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154500 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="probe" Dec 05 07:15:00 crc kubenswrapper[4863]: E1205 07:15:00.154507 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-notification-agent" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154513 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-notification-agent" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154701 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-updater" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154731 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154741 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154753 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154762 4863 
memory_manager.go:354] "RemoveStaleState removing state" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154770 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154775 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154786 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="81284a21-5f4d-4135-b08e-94415569eb09" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154810 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154821 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-reaper" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154830 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154837 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154847 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc4802ae-16f2-4b9e-a153-b48e9c8325b8" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154857 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4703140-cc56-4eb8-b06b-1033916a839f" containerName="memcached" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154865 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="ovsdbserver-sb" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154891 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154899 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154907 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154914 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovs-vswitchd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154922 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154931 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="afae6292-c1df-4dd3-abec-d1f493c03857" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154939 4863 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154945 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="ovsdbserver-nb" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154972 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="29b5ef50-8884-4ebe-bc29-291301e97e69" containerName="galera" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154981 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-notification-agent" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154989 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-updater" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.154999 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="rsync" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155010 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="90f6dc4d-a9ff-4bf7-a44f-0074c56130d2" containerName="nova-cell1-conductor-conductor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155019 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-expirer" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155047 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0752f5c6-12cf-4208-b523-f970b63f1b4b" containerName="glance-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155054 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c908ae8d-4ec2-4938-819c-0ba2ee26f209" containerName="rabbitmq" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155063 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155070 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c1e2892-03e9-4f09-84ce-0c91842108cc" containerName="glance-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155078 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcb2529a-46f2-4b17-bb95-8ef2a119f222" containerName="neutron-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155085 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155091 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="756dee9e-0241-4bf8-b7da-a9c9b4f92ac6" containerName="barbican-keystone-listener-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155097 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a46c1ea-72b2-4dfd-a073-72f82617ce76" containerName="cinder-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155125 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155131 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="proxy-httpd" Dec 
05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155141 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="26d1df4f-5673-4b66-ad39-6da15197ef72" containerName="keystone-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155150 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-replicator" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155158 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9d1ac32-bc45-41a0-b696-034ff92b13d4" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155164 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="swift-recon-cron" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155173 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="probe" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155181 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a1b67b-8afd-4b9a-bd8d-48e183dbb6a1" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155209 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="sg-core" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155216 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d847c56f-38ef-4aaf-a974-b347f5091038" containerName="nova-scheduler-scheduler" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155225 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebeed61-b530-43f8-bb15-5e42fa95f1b9" containerName="ceilometer-central-agent" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155232 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3409451f-c36d-4577-8720-89f4b6dd5ec5" containerName="nova-metadata-metadata" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155239 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155246 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="object-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155254 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5674946-023d-45c0-a0bf-373aa5d7ee65" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155261 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="92c796ad-a73e-4924-a59f-05031fcbb9d0" containerName="barbican-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155285 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="97b9e3bc-115e-4613-9e5e-4cf44651585e" containerName="barbican-worker" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155292 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="account-auditor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155300 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155310 4863 
memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5fba9a-0ba5-4ad6-9b3d-9c92735e57a3" containerName="ovsdb-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155316 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-httpd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155324 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3201b201-3f68-4c2e-858c-56f0c8242c68" containerName="proxy-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155333 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="753801f8-f439-415b-9674-08d58e53def8" containerName="dnsmasq-dns" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155340 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="da88ee86-9914-4396-bb33-d00d24b00c59" containerName="ovn-controller" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155364 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="132b3c8e-e25f-44ee-9d67-eccb0c2f8f91" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155372 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-api" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155379 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef02ff71-0212-4b81-8243-18e2d28b828e" containerName="nova-cell0-conductor-conductor" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155387 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155395 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bcba2d9-46f7-4696-8c9d-3c4b6fea6d2c" containerName="nova-api-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155403 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfff4892-c0b7-411f-9921-329db358dcde" containerName="openstack-network-exporter" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155411 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="79d93942-6ec7-4fea-9e05-a9c831ad3dd3" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155417 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcee685a-e1e9-4dd8-b04d-c5719c9bf771" containerName="cinder-scheduler" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155442 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="11854f2b-ca24-48c8-b33b-60558484ea0a" containerName="container-server" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155450 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="046f9d47-6b50-473f-838f-8375b6fe6389" containerName="placement-log" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155458 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd957476-007c-4882-8449-96deebe6a63c" containerName="galera" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155477 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="51b09f18-7196-4b58-b4a9-29671ae5a243" containerName="ovn-northd" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155486 4863 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="46586650-4568-4f5e-9854-30f6e0291b6b" containerName="rabbitmq" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155494 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="0765ebea-20ed-4ada-8031-3871a35e5f11" containerName="kube-state-metrics" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.155501 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" containerName="mariadb-account-delete" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.156093 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.157957 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw"] Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.159200 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.159362 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.330092 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66whj\" (UniqueName: \"kubernetes.io/projected/8a930371-3d03-4e41-bad2-de418281ec35-kube-api-access-66whj\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.330178 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a930371-3d03-4e41-bad2-de418281ec35-config-volume\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.330307 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a930371-3d03-4e41-bad2-de418281ec35-secret-volume\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.431224 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66whj\" (UniqueName: \"kubernetes.io/projected/8a930371-3d03-4e41-bad2-de418281ec35-kube-api-access-66whj\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.431285 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a930371-3d03-4e41-bad2-de418281ec35-config-volume\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.431340 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a930371-3d03-4e41-bad2-de418281ec35-secret-volume\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.432265 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a930371-3d03-4e41-bad2-de418281ec35-config-volume\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.442501 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a930371-3d03-4e41-bad2-de418281ec35-secret-volume\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.447581 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66whj\" (UniqueName: \"kubernetes.io/projected/8a930371-3d03-4e41-bad2-de418281ec35-kube-api-access-66whj\") pod \"collect-profiles-29415315-g5fmw\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.479962 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:00 crc kubenswrapper[4863]: I1205 07:15:00.878993 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw"] Dec 05 07:15:01 crc kubenswrapper[4863]: I1205 07:15:01.659715 4863 generic.go:334] "Generic (PLEG): container finished" podID="8a930371-3d03-4e41-bad2-de418281ec35" containerID="6c2d3e837075646e349ab77498405e42aeda4312c7aab1549f67e5e5f136e9f8" exitCode=0 Dec 05 07:15:01 crc kubenswrapper[4863]: I1205 07:15:01.659801 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" event={"ID":"8a930371-3d03-4e41-bad2-de418281ec35","Type":"ContainerDied","Data":"6c2d3e837075646e349ab77498405e42aeda4312c7aab1549f67e5e5f136e9f8"} Dec 05 07:15:01 crc kubenswrapper[4863]: I1205 07:15:01.660154 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" event={"ID":"8a930371-3d03-4e41-bad2-de418281ec35","Type":"ContainerStarted","Data":"1a47587f9a244df31bda5cb4937b5a94a1ec15b65021f5aad6382c48f2562d7a"} Dec 05 07:15:02 crc kubenswrapper[4863]: I1205 07:15:02.608595 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:15:02 crc kubenswrapper[4863]: E1205 07:15:02.609015 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:15:02 crc kubenswrapper[4863]: I1205 07:15:02.910837 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.065955 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66whj\" (UniqueName: \"kubernetes.io/projected/8a930371-3d03-4e41-bad2-de418281ec35-kube-api-access-66whj\") pod \"8a930371-3d03-4e41-bad2-de418281ec35\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.066722 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a930371-3d03-4e41-bad2-de418281ec35-config-volume\") pod \"8a930371-3d03-4e41-bad2-de418281ec35\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.066882 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a930371-3d03-4e41-bad2-de418281ec35-secret-volume\") pod \"8a930371-3d03-4e41-bad2-de418281ec35\" (UID: \"8a930371-3d03-4e41-bad2-de418281ec35\") " Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.067275 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a930371-3d03-4e41-bad2-de418281ec35-config-volume" (OuterVolumeSpecName: "config-volume") pod "8a930371-3d03-4e41-bad2-de418281ec35" (UID: "8a930371-3d03-4e41-bad2-de418281ec35"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.067369 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a930371-3d03-4e41-bad2-de418281ec35-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.071716 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a930371-3d03-4e41-bad2-de418281ec35-kube-api-access-66whj" (OuterVolumeSpecName: "kube-api-access-66whj") pod "8a930371-3d03-4e41-bad2-de418281ec35" (UID: "8a930371-3d03-4e41-bad2-de418281ec35"). InnerVolumeSpecName "kube-api-access-66whj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.071850 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a930371-3d03-4e41-bad2-de418281ec35-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8a930371-3d03-4e41-bad2-de418281ec35" (UID: "8a930371-3d03-4e41-bad2-de418281ec35"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.168593 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a930371-3d03-4e41-bad2-de418281ec35-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.168628 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66whj\" (UniqueName: \"kubernetes.io/projected/8a930371-3d03-4e41-bad2-de418281ec35-kube-api-access-66whj\") on node \"crc\" DevicePath \"\"" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.676335 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" event={"ID":"8a930371-3d03-4e41-bad2-de418281ec35","Type":"ContainerDied","Data":"1a47587f9a244df31bda5cb4937b5a94a1ec15b65021f5aad6382c48f2562d7a"} Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.676388 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a47587f9a244df31bda5cb4937b5a94a1ec15b65021f5aad6382c48f2562d7a" Dec 05 07:15:03 crc kubenswrapper[4863]: I1205 07:15:03.676390 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw" Dec 05 07:15:13 crc kubenswrapper[4863]: I1205 07:15:13.602225 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:15:13 crc kubenswrapper[4863]: E1205 07:15:13.602909 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.060604 4863 scope.go:117] "RemoveContainer" containerID="7351a5d013af7e40148f906ca767b5f7a97a8357859c58e8fb82704ee3f3265f" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.102304 4863 scope.go:117] "RemoveContainer" containerID="d39e60280af797213ff567444e2ac7730357b8fbaed6b124edf76d8004e2f874" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.124880 4863 scope.go:117] "RemoveContainer" containerID="ba2040c3ada0dcc55522e0214c772509434380f6d46ec249d9a153dec7052a1c" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.150865 4863 scope.go:117] "RemoveContainer" containerID="fdb429315e2084e65170204a91111fd3a4d74c3f6f5bd180be69d6d333c44d12" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.167526 4863 scope.go:117] "RemoveContainer" containerID="5b5b447d7ac85457e6d9869110913db6a4c08d245af0f3f2e634952291917028" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.193749 4863 scope.go:117] "RemoveContainer" containerID="a818b568d348af2b6c772e6604272b3c1b40133c3a45624d57e21fb880d12574" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.217128 4863 scope.go:117] "RemoveContainer" containerID="61eb40786c6b00d03ed8b78744de2b2ad67ad6d600f5434465a0576c04fb66fc" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.236443 4863 scope.go:117] "RemoveContainer" containerID="7e0226cdb89f1865a3ba27a5a4030c13b5306021fa3f10d070ae1785ea4e1460" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.270335 4863 
scope.go:117] "RemoveContainer" containerID="583ad8ddcdacbce532333b63f917dee58bcffb2149da32cbf850a4a46e8b2b4a" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.288504 4863 scope.go:117] "RemoveContainer" containerID="7ea9c5e09c184be80499aec3409d4672489370d0b2ae3c968323ecb809efe5f0" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.308823 4863 scope.go:117] "RemoveContainer" containerID="642d7702d4942e791e57b7a86cd6a9203603d21c533f093681e960d05e24e97a" Dec 05 07:15:24 crc kubenswrapper[4863]: I1205 07:15:24.333012 4863 scope.go:117] "RemoveContainer" containerID="7f0172ab698504d95c384d0d290700df45ad60c77f49b99f159ac68cb0831c65" Dec 05 07:15:27 crc kubenswrapper[4863]: I1205 07:15:27.602157 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:15:27 crc kubenswrapper[4863]: E1205 07:15:27.602798 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:15:38 crc kubenswrapper[4863]: I1205 07:15:38.603199 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:15:38 crc kubenswrapper[4863]: E1205 07:15:38.604855 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:15:49 crc kubenswrapper[4863]: I1205 07:15:49.602121 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:15:49 crc kubenswrapper[4863]: E1205 07:15:49.603110 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:16:01 crc kubenswrapper[4863]: I1205 07:16:01.602154 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:16:01 crc kubenswrapper[4863]: E1205 07:16:01.603015 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:16:14 crc kubenswrapper[4863]: I1205 07:16:14.602390 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:16:14 crc 
kubenswrapper[4863]: E1205 07:16:14.603360 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:16:24 crc kubenswrapper[4863]: I1205 07:16:24.495206 4863 scope.go:117] "RemoveContainer" containerID="cc1cf7408987229b4dde7b3d56a94b5b7862725d3aab7d49c4d3ef430a898d89" Dec 05 07:16:29 crc kubenswrapper[4863]: I1205 07:16:29.602255 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:16:29 crc kubenswrapper[4863]: E1205 07:16:29.603281 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:16:40 crc kubenswrapper[4863]: I1205 07:16:40.602647 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:16:40 crc kubenswrapper[4863]: E1205 07:16:40.603458 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:16:53 crc kubenswrapper[4863]: I1205 07:16:53.601851 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:16:53 crc kubenswrapper[4863]: E1205 07:16:53.602614 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:17:04 crc kubenswrapper[4863]: I1205 07:17:04.603101 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:17:04 crc kubenswrapper[4863]: E1205 07:17:04.603911 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:17:15 crc kubenswrapper[4863]: I1205 07:17:15.602346 4863 scope.go:117] "RemoveContainer" containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:17:16 crc 
kubenswrapper[4863]: I1205 07:17:16.769624 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"f9fd56d89a254d2b3e126e9d692a796e953eae998d9598b0c65ec31554871cb7"} Dec 05 07:17:24 crc kubenswrapper[4863]: I1205 07:17:24.566317 4863 scope.go:117] "RemoveContainer" containerID="7e4f44a5714c7ca375a4b222b0a503203afe6922968573a408f16f804259ed1f" Dec 05 07:17:24 crc kubenswrapper[4863]: I1205 07:17:24.607349 4863 scope.go:117] "RemoveContainer" containerID="902837ddb220f8642d77255db0858d08e6f1e7216d770a851e390ec8b9f821e2" Dec 05 07:17:24 crc kubenswrapper[4863]: I1205 07:17:24.662625 4863 scope.go:117] "RemoveContainer" containerID="a9ebf6aa32b3d9569da7e41f2fb95b1abe093308146f955d0b68429e9cd09fd9" Dec 05 07:18:24 crc kubenswrapper[4863]: I1205 07:18:24.728882 4863 scope.go:117] "RemoveContainer" containerID="00bcf26dc0b9990e06d0e33384666ee00322bb913ab4306109d274baea0f46d4" Dec 05 07:19:24 crc kubenswrapper[4863]: I1205 07:19:24.834492 4863 scope.go:117] "RemoveContainer" containerID="a068483e959f02649f7b1e04ebce07bb70283c18af4505e5671b518d62456e55" Dec 05 07:19:24 crc kubenswrapper[4863]: I1205 07:19:24.876842 4863 scope.go:117] "RemoveContainer" containerID="df76ce8ae2a6545ac0864b16219ff2541392407a17b3828ce1a57cca34eea46d" Dec 05 07:19:38 crc kubenswrapper[4863]: I1205 07:19:38.464552 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:19:38 crc kubenswrapper[4863]: I1205 07:19:38.465364 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.764564 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6w48z"] Dec 05 07:19:50 crc kubenswrapper[4863]: E1205 07:19:50.768578 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" containerName="mariadb-account-delete" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.768752 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b21df5e-065a-4c62-b271-704c86b97f58" containerName="mariadb-account-delete" Dec 05 07:19:50 crc kubenswrapper[4863]: E1205 07:19:50.768785 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a930371-3d03-4e41-bad2-de418281ec35" containerName="collect-profiles" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.768795 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a930371-3d03-4e41-bad2-de418281ec35" containerName="collect-profiles" Dec 05 07:19:50 crc kubenswrapper[4863]: E1205 07:19:50.768813 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerName="mariadb-account-delete" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.768821 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e15a3f00-fce6-490e-9b6b-ca28d8334d25" containerName="mariadb-account-delete" Dec 05 07:19:50 crc 
kubenswrapper[4863]: I1205 07:19:50.769071 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a930371-3d03-4e41-bad2-de418281ec35" containerName="collect-profiles" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.769094 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="81284a21-5f4d-4135-b08e-94415569eb09" containerName="mariadb-account-delete" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.770252 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.787686 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6w48z"] Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.923981 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n9xk\" (UniqueName: \"kubernetes.io/projected/7e575574-13fc-4796-ab56-d6156d1ff1f0-kube-api-access-8n9xk\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.924145 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-catalog-content\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:50 crc kubenswrapper[4863]: I1205 07:19:50.924185 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-utilities\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.027332 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-utilities\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.027557 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n9xk\" (UniqueName: \"kubernetes.io/projected/7e575574-13fc-4796-ab56-d6156d1ff1f0-kube-api-access-8n9xk\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.027652 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-catalog-content\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.027937 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-utilities\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " 
pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.028055 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-catalog-content\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.054715 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n9xk\" (UniqueName: \"kubernetes.io/projected/7e575574-13fc-4796-ab56-d6156d1ff1f0-kube-api-access-8n9xk\") pod \"certified-operators-6w48z\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.095825 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:19:51 crc kubenswrapper[4863]: I1205 07:19:51.548012 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6w48z"] Dec 05 07:19:52 crc kubenswrapper[4863]: I1205 07:19:52.258166 4863 generic.go:334] "Generic (PLEG): container finished" podID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerID="05c8168f2c8c02fb3ad65f624146ded7747bbbb23b9e25297fb6e77837744654" exitCode=0 Dec 05 07:19:52 crc kubenswrapper[4863]: I1205 07:19:52.258254 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6w48z" event={"ID":"7e575574-13fc-4796-ab56-d6156d1ff1f0","Type":"ContainerDied","Data":"05c8168f2c8c02fb3ad65f624146ded7747bbbb23b9e25297fb6e77837744654"} Dec 05 07:19:52 crc kubenswrapper[4863]: I1205 07:19:52.258623 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6w48z" event={"ID":"7e575574-13fc-4796-ab56-d6156d1ff1f0","Type":"ContainerStarted","Data":"04626600f8c2a55e46205b52678eea1ed6cebfe290b3ad11d60e27d1ceaf250f"} Dec 05 07:19:52 crc kubenswrapper[4863]: I1205 07:19:52.265811 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:19:53 crc kubenswrapper[4863]: I1205 07:19:53.269536 4863 generic.go:334] "Generic (PLEG): container finished" podID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerID="652ad0ab130fb681461ee44f24c5656f526e26fc432011fada00e5c884a20829" exitCode=0 Dec 05 07:19:53 crc kubenswrapper[4863]: I1205 07:19:53.269609 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6w48z" event={"ID":"7e575574-13fc-4796-ab56-d6156d1ff1f0","Type":"ContainerDied","Data":"652ad0ab130fb681461ee44f24c5656f526e26fc432011fada00e5c884a20829"} Dec 05 07:19:54 crc kubenswrapper[4863]: I1205 07:19:54.280794 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6w48z" event={"ID":"7e575574-13fc-4796-ab56-d6156d1ff1f0","Type":"ContainerStarted","Data":"f830e1c24aeab036484abd5f986e93ce0d47b324081e2e2334c95005590a2366"} Dec 05 07:19:54 crc kubenswrapper[4863]: I1205 07:19:54.301689 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6w48z" podStartSLOduration=2.813025646 podStartE2EDuration="4.301667535s" podCreationTimestamp="2025-12-05 07:19:50 +0000 UTC" firstStartedPulling="2025-12-05 07:19:52.265293434 +0000 UTC 
m=+2019.991290514" lastFinishedPulling="2025-12-05 07:19:53.753935363 +0000 UTC m=+2021.479932403" observedRunningTime="2025-12-05 07:19:54.300390464 +0000 UTC m=+2022.026387514" watchObservedRunningTime="2025-12-05 07:19:54.301667535 +0000 UTC m=+2022.027664595" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.166834 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g5d6n"] Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.168529 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.184504 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g5d6n"] Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.251461 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-utilities\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.251584 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-catalog-content\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.251628 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx9md\" (UniqueName: \"kubernetes.io/projected/86a72fed-b5eb-4dfc-9b47-333c07ac7334-kube-api-access-kx9md\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.353571 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-catalog-content\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.353663 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx9md\" (UniqueName: \"kubernetes.io/projected/86a72fed-b5eb-4dfc-9b47-333c07ac7334-kube-api-access-kx9md\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.353783 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-utilities\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.354549 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-catalog-content\") pod \"community-operators-g5d6n\" (UID: 
\"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.354620 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-utilities\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.383230 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx9md\" (UniqueName: \"kubernetes.io/projected/86a72fed-b5eb-4dfc-9b47-333c07ac7334-kube-api-access-kx9md\") pod \"community-operators-g5d6n\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.486877 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:19:59 crc kubenswrapper[4863]: I1205 07:19:59.980009 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g5d6n"] Dec 05 07:20:00 crc kubenswrapper[4863]: I1205 07:20:00.327677 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5d6n" event={"ID":"86a72fed-b5eb-4dfc-9b47-333c07ac7334","Type":"ContainerStarted","Data":"fc897b5a38cdd9c86058483e1f55d605e425ef1ddba8be91e7edbd3e9553c666"} Dec 05 07:20:01 crc kubenswrapper[4863]: I1205 07:20:01.096795 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:20:01 crc kubenswrapper[4863]: I1205 07:20:01.096876 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:20:01 crc kubenswrapper[4863]: I1205 07:20:01.153003 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:20:01 crc kubenswrapper[4863]: I1205 07:20:01.338921 4863 generic.go:334] "Generic (PLEG): container finished" podID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerID="87588b48609b947025473efde8b2a2153e3600332dd824a718206df0372af2ce" exitCode=0 Dec 05 07:20:01 crc kubenswrapper[4863]: I1205 07:20:01.339041 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5d6n" event={"ID":"86a72fed-b5eb-4dfc-9b47-333c07ac7334","Type":"ContainerDied","Data":"87588b48609b947025473efde8b2a2153e3600332dd824a718206df0372af2ce"} Dec 05 07:20:01 crc kubenswrapper[4863]: I1205 07:20:01.407072 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:20:03 crc kubenswrapper[4863]: I1205 07:20:03.360093 4863 generic.go:334] "Generic (PLEG): container finished" podID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerID="d19b9b217d0d4551bffbabeb56a5ce758b08a2975567f8887cad3e1ee4246136" exitCode=0 Dec 05 07:20:03 crc kubenswrapper[4863]: I1205 07:20:03.360159 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5d6n" event={"ID":"86a72fed-b5eb-4dfc-9b47-333c07ac7334","Type":"ContainerDied","Data":"d19b9b217d0d4551bffbabeb56a5ce758b08a2975567f8887cad3e1ee4246136"} Dec 05 07:20:03 crc kubenswrapper[4863]: I1205 07:20:03.554834 4863 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6w48z"] Dec 05 07:20:03 crc kubenswrapper[4863]: I1205 07:20:03.555155 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6w48z" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="registry-server" containerID="cri-o://f830e1c24aeab036484abd5f986e93ce0d47b324081e2e2334c95005590a2366" gracePeriod=2 Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.381345 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5d6n" event={"ID":"86a72fed-b5eb-4dfc-9b47-333c07ac7334","Type":"ContainerStarted","Data":"33ab21cef45a5f78a07e40474c8bee8ffbc1483730dc988a063ffd787a296312"} Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.400623 4863 generic.go:334] "Generic (PLEG): container finished" podID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerID="f830e1c24aeab036484abd5f986e93ce0d47b324081e2e2334c95005590a2366" exitCode=0 Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.400673 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6w48z" event={"ID":"7e575574-13fc-4796-ab56-d6156d1ff1f0","Type":"ContainerDied","Data":"f830e1c24aeab036484abd5f986e93ce0d47b324081e2e2334c95005590a2366"} Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.412872 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g5d6n" podStartSLOduration=2.686608547 podStartE2EDuration="5.412851686s" podCreationTimestamp="2025-12-05 07:19:59 +0000 UTC" firstStartedPulling="2025-12-05 07:20:01.340796841 +0000 UTC m=+2029.066793881" lastFinishedPulling="2025-12-05 07:20:04.06703998 +0000 UTC m=+2031.793037020" observedRunningTime="2025-12-05 07:20:04.406766707 +0000 UTC m=+2032.132763747" watchObservedRunningTime="2025-12-05 07:20:04.412851686 +0000 UTC m=+2032.138848736" Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.501927 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.640844 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-catalog-content\") pod \"7e575574-13fc-4796-ab56-d6156d1ff1f0\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.640909 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-utilities\") pod \"7e575574-13fc-4796-ab56-d6156d1ff1f0\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.641102 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n9xk\" (UniqueName: \"kubernetes.io/projected/7e575574-13fc-4796-ab56-d6156d1ff1f0-kube-api-access-8n9xk\") pod \"7e575574-13fc-4796-ab56-d6156d1ff1f0\" (UID: \"7e575574-13fc-4796-ab56-d6156d1ff1f0\") " Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.641794 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-utilities" (OuterVolumeSpecName: "utilities") pod "7e575574-13fc-4796-ab56-d6156d1ff1f0" (UID: "7e575574-13fc-4796-ab56-d6156d1ff1f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.656675 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e575574-13fc-4796-ab56-d6156d1ff1f0-kube-api-access-8n9xk" (OuterVolumeSpecName: "kube-api-access-8n9xk") pod "7e575574-13fc-4796-ab56-d6156d1ff1f0" (UID: "7e575574-13fc-4796-ab56-d6156d1ff1f0"). InnerVolumeSpecName "kube-api-access-8n9xk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.713180 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7e575574-13fc-4796-ab56-d6156d1ff1f0" (UID: "7e575574-13fc-4796-ab56-d6156d1ff1f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.742509 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n9xk\" (UniqueName: \"kubernetes.io/projected/7e575574-13fc-4796-ab56-d6156d1ff1f0-kube-api-access-8n9xk\") on node \"crc\" DevicePath \"\"" Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.742554 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:20:04 crc kubenswrapper[4863]: I1205 07:20:04.742568 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e575574-13fc-4796-ab56-d6156d1ff1f0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:20:05 crc kubenswrapper[4863]: I1205 07:20:05.411397 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6w48z" event={"ID":"7e575574-13fc-4796-ab56-d6156d1ff1f0","Type":"ContainerDied","Data":"04626600f8c2a55e46205b52678eea1ed6cebfe290b3ad11d60e27d1ceaf250f"} Dec 05 07:20:05 crc kubenswrapper[4863]: I1205 07:20:05.411875 4863 scope.go:117] "RemoveContainer" containerID="f830e1c24aeab036484abd5f986e93ce0d47b324081e2e2334c95005590a2366" Dec 05 07:20:05 crc kubenswrapper[4863]: I1205 07:20:05.411432 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6w48z" Dec 05 07:20:05 crc kubenswrapper[4863]: I1205 07:20:05.431638 4863 scope.go:117] "RemoveContainer" containerID="652ad0ab130fb681461ee44f24c5656f526e26fc432011fada00e5c884a20829" Dec 05 07:20:05 crc kubenswrapper[4863]: I1205 07:20:05.453064 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6w48z"] Dec 05 07:20:05 crc kubenswrapper[4863]: I1205 07:20:05.453710 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6w48z"] Dec 05 07:20:05 crc kubenswrapper[4863]: I1205 07:20:05.458799 4863 scope.go:117] "RemoveContainer" containerID="05c8168f2c8c02fb3ad65f624146ded7747bbbb23b9e25297fb6e77837744654" Dec 05 07:20:06 crc kubenswrapper[4863]: I1205 07:20:06.618107 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" path="/var/lib/kubelet/pods/7e575574-13fc-4796-ab56-d6156d1ff1f0/volumes" Dec 05 07:20:08 crc kubenswrapper[4863]: I1205 07:20:08.464362 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:20:08 crc kubenswrapper[4863]: I1205 07:20:08.464841 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:20:09 crc kubenswrapper[4863]: I1205 07:20:09.488111 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:20:09 crc kubenswrapper[4863]: I1205 07:20:09.488174 4863 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:20:09 crc kubenswrapper[4863]: I1205 07:20:09.533704 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:20:10 crc kubenswrapper[4863]: I1205 07:20:10.525196 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:20:10 crc kubenswrapper[4863]: I1205 07:20:10.572134 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g5d6n"] Dec 05 07:20:12 crc kubenswrapper[4863]: I1205 07:20:12.482965 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g5d6n" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="registry-server" containerID="cri-o://33ab21cef45a5f78a07e40474c8bee8ffbc1483730dc988a063ffd787a296312" gracePeriod=2 Dec 05 07:20:13 crc kubenswrapper[4863]: I1205 07:20:13.498214 4863 generic.go:334] "Generic (PLEG): container finished" podID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerID="33ab21cef45a5f78a07e40474c8bee8ffbc1483730dc988a063ffd787a296312" exitCode=0 Dec 05 07:20:13 crc kubenswrapper[4863]: I1205 07:20:13.498277 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5d6n" event={"ID":"86a72fed-b5eb-4dfc-9b47-333c07ac7334","Type":"ContainerDied","Data":"33ab21cef45a5f78a07e40474c8bee8ffbc1483730dc988a063ffd787a296312"} Dec 05 07:20:13 crc kubenswrapper[4863]: I1205 07:20:13.976414 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.098262 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-catalog-content\") pod \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.098655 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-utilities\") pod \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.098801 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kx9md\" (UniqueName: \"kubernetes.io/projected/86a72fed-b5eb-4dfc-9b47-333c07ac7334-kube-api-access-kx9md\") pod \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\" (UID: \"86a72fed-b5eb-4dfc-9b47-333c07ac7334\") " Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.099564 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-utilities" (OuterVolumeSpecName: "utilities") pod "86a72fed-b5eb-4dfc-9b47-333c07ac7334" (UID: "86a72fed-b5eb-4dfc-9b47-333c07ac7334"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.107934 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86a72fed-b5eb-4dfc-9b47-333c07ac7334-kube-api-access-kx9md" (OuterVolumeSpecName: "kube-api-access-kx9md") pod "86a72fed-b5eb-4dfc-9b47-333c07ac7334" (UID: "86a72fed-b5eb-4dfc-9b47-333c07ac7334"). InnerVolumeSpecName "kube-api-access-kx9md". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.179960 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86a72fed-b5eb-4dfc-9b47-333c07ac7334" (UID: "86a72fed-b5eb-4dfc-9b47-333c07ac7334"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.200262 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.200304 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kx9md\" (UniqueName: \"kubernetes.io/projected/86a72fed-b5eb-4dfc-9b47-333c07ac7334-kube-api-access-kx9md\") on node \"crc\" DevicePath \"\"" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.200319 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86a72fed-b5eb-4dfc-9b47-333c07ac7334-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.510348 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5d6n" event={"ID":"86a72fed-b5eb-4dfc-9b47-333c07ac7334","Type":"ContainerDied","Data":"fc897b5a38cdd9c86058483e1f55d605e425ef1ddba8be91e7edbd3e9553c666"} Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.510403 4863 scope.go:117] "RemoveContainer" containerID="33ab21cef45a5f78a07e40474c8bee8ffbc1483730dc988a063ffd787a296312" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.510454 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g5d6n" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.545314 4863 scope.go:117] "RemoveContainer" containerID="d19b9b217d0d4551bffbabeb56a5ce758b08a2975567f8887cad3e1ee4246136" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.568334 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g5d6n"] Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.584019 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g5d6n"] Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.588978 4863 scope.go:117] "RemoveContainer" containerID="87588b48609b947025473efde8b2a2153e3600332dd824a718206df0372af2ce" Dec 05 07:20:14 crc kubenswrapper[4863]: I1205 07:20:14.614675 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" path="/var/lib/kubelet/pods/86a72fed-b5eb-4dfc-9b47-333c07ac7334/volumes" Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.463823 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.464410 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.464462 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.465085 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f9fd56d89a254d2b3e126e9d692a796e953eae998d9598b0c65ec31554871cb7"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.465155 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://f9fd56d89a254d2b3e126e9d692a796e953eae998d9598b0c65ec31554871cb7" gracePeriod=600 Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.732694 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="f9fd56d89a254d2b3e126e9d692a796e953eae998d9598b0c65ec31554871cb7" exitCode=0 Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.732748 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"f9fd56d89a254d2b3e126e9d692a796e953eae998d9598b0c65ec31554871cb7"} Dec 05 07:20:38 crc kubenswrapper[4863]: I1205 07:20:38.732798 4863 scope.go:117] "RemoveContainer" 
containerID="9ff7e8c864b63d2c5770f8555456229051aa5d2d1c2c04a541b7d437471751c9" Dec 05 07:20:39 crc kubenswrapper[4863]: I1205 07:20:39.746328 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678"} Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.705552 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nqsmg"] Dec 05 07:21:05 crc kubenswrapper[4863]: E1205 07:21:05.706567 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="registry-server" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.706590 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="registry-server" Dec 05 07:21:05 crc kubenswrapper[4863]: E1205 07:21:05.706628 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="extract-utilities" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.706641 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="extract-utilities" Dec 05 07:21:05 crc kubenswrapper[4863]: E1205 07:21:05.706668 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="registry-server" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.706680 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="registry-server" Dec 05 07:21:05 crc kubenswrapper[4863]: E1205 07:21:05.706703 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="extract-content" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.706715 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="extract-content" Dec 05 07:21:05 crc kubenswrapper[4863]: E1205 07:21:05.706744 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="extract-content" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.706757 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="extract-content" Dec 05 07:21:05 crc kubenswrapper[4863]: E1205 07:21:05.706794 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="extract-utilities" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.706805 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="extract-utilities" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.707025 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="86a72fed-b5eb-4dfc-9b47-333c07ac7334" containerName="registry-server" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.707045 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e575574-13fc-4796-ab56-d6156d1ff1f0" containerName="registry-server" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.709030 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.720733 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nqsmg"] Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.871840 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-catalog-content\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.872004 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-utilities\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.872103 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b7r5\" (UniqueName: \"kubernetes.io/projected/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-kube-api-access-4b7r5\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.973548 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-utilities\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.973607 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b7r5\" (UniqueName: \"kubernetes.io/projected/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-kube-api-access-4b7r5\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.973649 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-catalog-content\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.974073 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-utilities\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.974094 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-catalog-content\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:05 crc kubenswrapper[4863]: I1205 07:21:05.996319 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4b7r5\" (UniqueName: \"kubernetes.io/projected/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-kube-api-access-4b7r5\") pod \"redhat-operators-nqsmg\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:06 crc kubenswrapper[4863]: I1205 07:21:06.045373 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:06 crc kubenswrapper[4863]: I1205 07:21:06.498795 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nqsmg"] Dec 05 07:21:06 crc kubenswrapper[4863]: I1205 07:21:06.971044 4863 generic.go:334] "Generic (PLEG): container finished" podID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerID="f1938ccfec100a86054eb6e7bf7c1122448e3ef66fd734ec61b08126131c5d30" exitCode=0 Dec 05 07:21:06 crc kubenswrapper[4863]: I1205 07:21:06.971098 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nqsmg" event={"ID":"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d","Type":"ContainerDied","Data":"f1938ccfec100a86054eb6e7bf7c1122448e3ef66fd734ec61b08126131c5d30"} Dec 05 07:21:06 crc kubenswrapper[4863]: I1205 07:21:06.971152 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nqsmg" event={"ID":"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d","Type":"ContainerStarted","Data":"75c559022cb1f3afecd8932b11a35edb1a8f94a417ead9d2dc3a80543675c272"} Dec 05 07:21:07 crc kubenswrapper[4863]: I1205 07:21:07.982806 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nqsmg" event={"ID":"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d","Type":"ContainerStarted","Data":"8a7ebc4f2d239437b309470f7139a349313fb274c6d5c50c8256a640e6b285e9"} Dec 05 07:21:08 crc kubenswrapper[4863]: I1205 07:21:08.997620 4863 generic.go:334] "Generic (PLEG): container finished" podID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerID="8a7ebc4f2d239437b309470f7139a349313fb274c6d5c50c8256a640e6b285e9" exitCode=0 Dec 05 07:21:08 crc kubenswrapper[4863]: I1205 07:21:08.998634 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nqsmg" event={"ID":"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d","Type":"ContainerDied","Data":"8a7ebc4f2d239437b309470f7139a349313fb274c6d5c50c8256a640e6b285e9"} Dec 05 07:21:10 crc kubenswrapper[4863]: I1205 07:21:10.013766 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nqsmg" event={"ID":"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d","Type":"ContainerStarted","Data":"a160046d7e450cc64d8236ed7bf0a22ec45f6cc7d7f01b5aa7dc49f70ca6b329"} Dec 05 07:21:10 crc kubenswrapper[4863]: I1205 07:21:10.049766 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nqsmg" podStartSLOduration=2.622812057 podStartE2EDuration="5.049739118s" podCreationTimestamp="2025-12-05 07:21:05 +0000 UTC" firstStartedPulling="2025-12-05 07:21:06.974490655 +0000 UTC m=+2094.700487695" lastFinishedPulling="2025-12-05 07:21:09.401417716 +0000 UTC m=+2097.127414756" observedRunningTime="2025-12-05 07:21:10.044318837 +0000 UTC m=+2097.770315887" watchObservedRunningTime="2025-12-05 07:21:10.049739118 +0000 UTC m=+2097.775736158" Dec 05 07:21:16 crc kubenswrapper[4863]: I1205 07:21:16.045728 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 
07:21:16 crc kubenswrapper[4863]: I1205 07:21:16.046267 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:16 crc kubenswrapper[4863]: I1205 07:21:16.084770 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:16 crc kubenswrapper[4863]: I1205 07:21:16.126207 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:16 crc kubenswrapper[4863]: I1205 07:21:16.322090 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nqsmg"] Dec 05 07:21:18 crc kubenswrapper[4863]: I1205 07:21:18.074215 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nqsmg" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="registry-server" containerID="cri-o://a160046d7e450cc64d8236ed7bf0a22ec45f6cc7d7f01b5aa7dc49f70ca6b329" gracePeriod=2 Dec 05 07:21:19 crc kubenswrapper[4863]: I1205 07:21:19.085144 4863 generic.go:334] "Generic (PLEG): container finished" podID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerID="a160046d7e450cc64d8236ed7bf0a22ec45f6cc7d7f01b5aa7dc49f70ca6b329" exitCode=0 Dec 05 07:21:19 crc kubenswrapper[4863]: I1205 07:21:19.085230 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nqsmg" event={"ID":"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d","Type":"ContainerDied","Data":"a160046d7e450cc64d8236ed7bf0a22ec45f6cc7d7f01b5aa7dc49f70ca6b329"} Dec 05 07:21:19 crc kubenswrapper[4863]: I1205 07:21:19.868683 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.021929 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-catalog-content\") pod \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.022029 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-utilities\") pod \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.022085 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4b7r5\" (UniqueName: \"kubernetes.io/projected/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-kube-api-access-4b7r5\") pod \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\" (UID: \"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d\") " Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.023897 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-utilities" (OuterVolumeSpecName: "utilities") pod "78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" (UID: "78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.028004 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-kube-api-access-4b7r5" (OuterVolumeSpecName: "kube-api-access-4b7r5") pod "78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" (UID: "78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d"). InnerVolumeSpecName "kube-api-access-4b7r5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.096696 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nqsmg" event={"ID":"78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d","Type":"ContainerDied","Data":"75c559022cb1f3afecd8932b11a35edb1a8f94a417ead9d2dc3a80543675c272"} Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.096755 4863 scope.go:117] "RemoveContainer" containerID="a160046d7e450cc64d8236ed7bf0a22ec45f6cc7d7f01b5aa7dc49f70ca6b329" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.096766 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nqsmg" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.123798 4863 scope.go:117] "RemoveContainer" containerID="8a7ebc4f2d239437b309470f7139a349313fb274c6d5c50c8256a640e6b285e9" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.124332 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4b7r5\" (UniqueName: \"kubernetes.io/projected/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-kube-api-access-4b7r5\") on node \"crc\" DevicePath \"\"" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.124358 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.153273 4863 scope.go:117] "RemoveContainer" containerID="f1938ccfec100a86054eb6e7bf7c1122448e3ef66fd734ec61b08126131c5d30" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.299974 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" (UID: "78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.327393 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.434536 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nqsmg"] Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.439817 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nqsmg"] Dec 05 07:21:20 crc kubenswrapper[4863]: I1205 07:21:20.623966 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" path="/var/lib/kubelet/pods/78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d/volumes" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.458609 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wvzw7"] Dec 05 07:21:24 crc kubenswrapper[4863]: E1205 07:21:24.460393 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="registry-server" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.460525 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="registry-server" Dec 05 07:21:24 crc kubenswrapper[4863]: E1205 07:21:24.460626 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="extract-content" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.460697 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="extract-content" Dec 05 07:21:24 crc kubenswrapper[4863]: E1205 07:21:24.460775 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="extract-utilities" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.460843 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="extract-utilities" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.461108 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="78dd7cd2-f86c-4a83-a1ea-1d9d7e26456d" containerName="registry-server" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.462403 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.473231 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvzw7"] Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.588076 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-catalog-content\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.588342 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-utilities\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.588536 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq5xd\" (UniqueName: \"kubernetes.io/projected/1c236b47-69cd-4f72-a827-259a86b14932-kube-api-access-sq5xd\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.689395 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq5xd\" (UniqueName: \"kubernetes.io/projected/1c236b47-69cd-4f72-a827-259a86b14932-kube-api-access-sq5xd\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.689491 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-catalog-content\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.689514 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-utilities\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.689926 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-utilities\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.690380 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-catalog-content\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.714740 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-sq5xd\" (UniqueName: \"kubernetes.io/projected/1c236b47-69cd-4f72-a827-259a86b14932-kube-api-access-sq5xd\") pod \"redhat-marketplace-wvzw7\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:24 crc kubenswrapper[4863]: I1205 07:21:24.787901 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:25 crc kubenswrapper[4863]: I1205 07:21:25.250180 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvzw7"] Dec 05 07:21:26 crc kubenswrapper[4863]: I1205 07:21:26.143937 4863 generic.go:334] "Generic (PLEG): container finished" podID="1c236b47-69cd-4f72-a827-259a86b14932" containerID="a17d27cf6151fe2ddb92499b16c5eccdfd9228840a4869927cb7866b6fe86ea1" exitCode=0 Dec 05 07:21:26 crc kubenswrapper[4863]: I1205 07:21:26.143982 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvzw7" event={"ID":"1c236b47-69cd-4f72-a827-259a86b14932","Type":"ContainerDied","Data":"a17d27cf6151fe2ddb92499b16c5eccdfd9228840a4869927cb7866b6fe86ea1"} Dec 05 07:21:26 crc kubenswrapper[4863]: I1205 07:21:26.144007 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvzw7" event={"ID":"1c236b47-69cd-4f72-a827-259a86b14932","Type":"ContainerStarted","Data":"b53d233127b5d498ca634e49a655686e974e8166bcd88d95b29a6dfc49f381c1"} Dec 05 07:21:27 crc kubenswrapper[4863]: I1205 07:21:27.152723 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvzw7" event={"ID":"1c236b47-69cd-4f72-a827-259a86b14932","Type":"ContainerStarted","Data":"d2d7e239433660b62de33baba39e8cadc98e43726417d17d6de63c47c6883ad3"} Dec 05 07:21:28 crc kubenswrapper[4863]: I1205 07:21:28.161211 4863 generic.go:334] "Generic (PLEG): container finished" podID="1c236b47-69cd-4f72-a827-259a86b14932" containerID="d2d7e239433660b62de33baba39e8cadc98e43726417d17d6de63c47c6883ad3" exitCode=0 Dec 05 07:21:28 crc kubenswrapper[4863]: I1205 07:21:28.161362 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvzw7" event={"ID":"1c236b47-69cd-4f72-a827-259a86b14932","Type":"ContainerDied","Data":"d2d7e239433660b62de33baba39e8cadc98e43726417d17d6de63c47c6883ad3"} Dec 05 07:21:29 crc kubenswrapper[4863]: I1205 07:21:29.170972 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvzw7" event={"ID":"1c236b47-69cd-4f72-a827-259a86b14932","Type":"ContainerStarted","Data":"9a3c2bb51492fd75b3ea32be66c7827edfbd492f14e339053e4e507289214f74"} Dec 05 07:21:34 crc kubenswrapper[4863]: I1205 07:21:34.788690 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:34 crc kubenswrapper[4863]: I1205 07:21:34.789207 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:34 crc kubenswrapper[4863]: I1205 07:21:34.860747 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:34 crc kubenswrapper[4863]: I1205 07:21:34.882956 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wvzw7" 
podStartSLOduration=8.477819114 podStartE2EDuration="10.882934101s" podCreationTimestamp="2025-12-05 07:21:24 +0000 UTC" firstStartedPulling="2025-12-05 07:21:26.148618906 +0000 UTC m=+2113.874615946" lastFinishedPulling="2025-12-05 07:21:28.553733873 +0000 UTC m=+2116.279730933" observedRunningTime="2025-12-05 07:21:29.190380011 +0000 UTC m=+2116.916377051" watchObservedRunningTime="2025-12-05 07:21:34.882934101 +0000 UTC m=+2122.608931151" Dec 05 07:21:35 crc kubenswrapper[4863]: I1205 07:21:35.271554 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:35 crc kubenswrapper[4863]: I1205 07:21:35.328597 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvzw7"] Dec 05 07:21:37 crc kubenswrapper[4863]: I1205 07:21:37.228837 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wvzw7" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="registry-server" containerID="cri-o://9a3c2bb51492fd75b3ea32be66c7827edfbd492f14e339053e4e507289214f74" gracePeriod=2 Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.237252 4863 generic.go:334] "Generic (PLEG): container finished" podID="1c236b47-69cd-4f72-a827-259a86b14932" containerID="9a3c2bb51492fd75b3ea32be66c7827edfbd492f14e339053e4e507289214f74" exitCode=0 Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.237283 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvzw7" event={"ID":"1c236b47-69cd-4f72-a827-259a86b14932","Type":"ContainerDied","Data":"9a3c2bb51492fd75b3ea32be66c7827edfbd492f14e339053e4e507289214f74"} Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.308546 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.484853 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-catalog-content\") pod \"1c236b47-69cd-4f72-a827-259a86b14932\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.484929 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-utilities\") pod \"1c236b47-69cd-4f72-a827-259a86b14932\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.484960 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq5xd\" (UniqueName: \"kubernetes.io/projected/1c236b47-69cd-4f72-a827-259a86b14932-kube-api-access-sq5xd\") pod \"1c236b47-69cd-4f72-a827-259a86b14932\" (UID: \"1c236b47-69cd-4f72-a827-259a86b14932\") " Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.487451 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-utilities" (OuterVolumeSpecName: "utilities") pod "1c236b47-69cd-4f72-a827-259a86b14932" (UID: "1c236b47-69cd-4f72-a827-259a86b14932"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.490251 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c236b47-69cd-4f72-a827-259a86b14932-kube-api-access-sq5xd" (OuterVolumeSpecName: "kube-api-access-sq5xd") pod "1c236b47-69cd-4f72-a827-259a86b14932" (UID: "1c236b47-69cd-4f72-a827-259a86b14932"). InnerVolumeSpecName "kube-api-access-sq5xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.506869 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c236b47-69cd-4f72-a827-259a86b14932" (UID: "1c236b47-69cd-4f72-a827-259a86b14932"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.587033 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.587072 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c236b47-69cd-4f72-a827-259a86b14932-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:21:38 crc kubenswrapper[4863]: I1205 07:21:38.587092 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq5xd\" (UniqueName: \"kubernetes.io/projected/1c236b47-69cd-4f72-a827-259a86b14932-kube-api-access-sq5xd\") on node \"crc\" DevicePath \"\"" Dec 05 07:21:39 crc kubenswrapper[4863]: I1205 07:21:39.244982 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvzw7" event={"ID":"1c236b47-69cd-4f72-a827-259a86b14932","Type":"ContainerDied","Data":"b53d233127b5d498ca634e49a655686e974e8166bcd88d95b29a6dfc49f381c1"} Dec 05 07:21:39 crc kubenswrapper[4863]: I1205 07:21:39.245074 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvzw7" Dec 05 07:21:39 crc kubenswrapper[4863]: I1205 07:21:39.245309 4863 scope.go:117] "RemoveContainer" containerID="9a3c2bb51492fd75b3ea32be66c7827edfbd492f14e339053e4e507289214f74" Dec 05 07:21:39 crc kubenswrapper[4863]: I1205 07:21:39.266141 4863 scope.go:117] "RemoveContainer" containerID="d2d7e239433660b62de33baba39e8cadc98e43726417d17d6de63c47c6883ad3" Dec 05 07:21:39 crc kubenswrapper[4863]: I1205 07:21:39.269700 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvzw7"] Dec 05 07:21:39 crc kubenswrapper[4863]: I1205 07:21:39.275318 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvzw7"] Dec 05 07:21:39 crc kubenswrapper[4863]: I1205 07:21:39.285770 4863 scope.go:117] "RemoveContainer" containerID="a17d27cf6151fe2ddb92499b16c5eccdfd9228840a4869927cb7866b6fe86ea1" Dec 05 07:21:40 crc kubenswrapper[4863]: I1205 07:21:40.612703 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c236b47-69cd-4f72-a827-259a86b14932" path="/var/lib/kubelet/pods/1c236b47-69cd-4f72-a827-259a86b14932/volumes" Dec 05 07:22:38 crc kubenswrapper[4863]: I1205 07:22:38.464728 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:22:38 crc kubenswrapper[4863]: I1205 07:22:38.465306 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:23:08 crc kubenswrapper[4863]: I1205 07:23:08.463861 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:23:08 crc kubenswrapper[4863]: I1205 07:23:08.464766 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:23:38 crc kubenswrapper[4863]: I1205 07:23:38.463989 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:23:38 crc kubenswrapper[4863]: I1205 07:23:38.464944 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:23:38 crc kubenswrapper[4863]: I1205 07:23:38.465024 4863 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:23:38 crc kubenswrapper[4863]: I1205 07:23:38.466159 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:23:38 crc kubenswrapper[4863]: I1205 07:23:38.466585 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" gracePeriod=600 Dec 05 07:23:38 crc kubenswrapper[4863]: E1205 07:23:38.608355 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:23:39 crc kubenswrapper[4863]: I1205 07:23:39.184783 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" exitCode=0 Dec 05 07:23:39 crc kubenswrapper[4863]: I1205 07:23:39.184847 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678"} Dec 05 07:23:39 crc kubenswrapper[4863]: I1205 07:23:39.184883 4863 scope.go:117] "RemoveContainer" containerID="f9fd56d89a254d2b3e126e9d692a796e953eae998d9598b0c65ec31554871cb7" Dec 05 07:23:39 crc kubenswrapper[4863]: I1205 07:23:39.185578 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:23:39 crc kubenswrapper[4863]: E1205 07:23:39.186027 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:23:50 crc kubenswrapper[4863]: I1205 07:23:50.601926 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:23:50 crc kubenswrapper[4863]: E1205 07:23:50.602677 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:24:04 
crc kubenswrapper[4863]: I1205 07:24:04.603600 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:24:04 crc kubenswrapper[4863]: E1205 07:24:04.604845 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:24:17 crc kubenswrapper[4863]: I1205 07:24:17.603004 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:24:17 crc kubenswrapper[4863]: E1205 07:24:17.604383 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:24:31 crc kubenswrapper[4863]: I1205 07:24:31.602137 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:24:31 crc kubenswrapper[4863]: E1205 07:24:31.603035 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:24:44 crc kubenswrapper[4863]: I1205 07:24:44.602711 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:24:44 crc kubenswrapper[4863]: E1205 07:24:44.603640 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:24:59 crc kubenswrapper[4863]: I1205 07:24:59.601865 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:24:59 crc kubenswrapper[4863]: E1205 07:24:59.602885 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:25:14 crc kubenswrapper[4863]: I1205 07:25:14.602012 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:25:14 crc 
kubenswrapper[4863]: E1205 07:25:14.602906 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:25:26 crc kubenswrapper[4863]: I1205 07:25:26.601688 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:25:26 crc kubenswrapper[4863]: E1205 07:25:26.602427 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:25:40 crc kubenswrapper[4863]: I1205 07:25:40.602556 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:25:40 crc kubenswrapper[4863]: E1205 07:25:40.603856 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:25:52 crc kubenswrapper[4863]: I1205 07:25:52.609872 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:25:52 crc kubenswrapper[4863]: E1205 07:25:52.610695 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:26:06 crc kubenswrapper[4863]: I1205 07:26:06.602590 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:26:06 crc kubenswrapper[4863]: E1205 07:26:06.603715 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:26:17 crc kubenswrapper[4863]: I1205 07:26:17.602174 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:26:17 crc kubenswrapper[4863]: E1205 07:26:17.603183 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:26:32 crc kubenswrapper[4863]: I1205 07:26:32.607594 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:26:32 crc kubenswrapper[4863]: E1205 07:26:32.608744 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:26:45 crc kubenswrapper[4863]: I1205 07:26:45.602525 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:26:45 crc kubenswrapper[4863]: E1205 07:26:45.603732 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:26:58 crc kubenswrapper[4863]: I1205 07:26:58.602428 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:26:58 crc kubenswrapper[4863]: E1205 07:26:58.603186 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:27:09 crc kubenswrapper[4863]: I1205 07:27:09.602139 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:27:09 crc kubenswrapper[4863]: E1205 07:27:09.602916 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:27:22 crc kubenswrapper[4863]: I1205 07:27:22.607705 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:27:22 crc kubenswrapper[4863]: E1205 07:27:22.608574 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:27:37 crc kubenswrapper[4863]: I1205 07:27:37.602193 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:27:37 crc kubenswrapper[4863]: E1205 07:27:37.603354 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:27:49 crc kubenswrapper[4863]: I1205 07:27:49.612181 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:27:49 crc kubenswrapper[4863]: E1205 07:27:49.616398 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:28:02 crc kubenswrapper[4863]: I1205 07:28:02.610344 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:28:02 crc kubenswrapper[4863]: E1205 07:28:02.611461 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:28:14 crc kubenswrapper[4863]: I1205 07:28:14.603064 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:28:14 crc kubenswrapper[4863]: E1205 07:28:14.604534 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:28:28 crc kubenswrapper[4863]: I1205 07:28:28.602623 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:28:28 crc kubenswrapper[4863]: E1205 07:28:28.603696 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:28:39 crc kubenswrapper[4863]: I1205 07:28:39.602331 4863 
scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:28:40 crc kubenswrapper[4863]: I1205 07:28:40.814484 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"bb86e10827cff69304a935256df119a57d85223194633210492265976e626f5c"} Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.159538 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p"] Dec 05 07:30:00 crc kubenswrapper[4863]: E1205 07:30:00.161006 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="registry-server" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.161039 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="registry-server" Dec 05 07:30:00 crc kubenswrapper[4863]: E1205 07:30:00.161068 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="extract-utilities" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.161087 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="extract-utilities" Dec 05 07:30:00 crc kubenswrapper[4863]: E1205 07:30:00.161124 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="extract-content" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.161144 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="extract-content" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.161517 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c236b47-69cd-4f72-a827-259a86b14932" containerName="registry-server" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.162529 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.166880 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.167225 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.172125 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p"] Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.313854 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/487cc8d4-cf82-4675-810c-7505a5a9ed13-secret-volume\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.313944 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m6ld\" (UniqueName: \"kubernetes.io/projected/487cc8d4-cf82-4675-810c-7505a5a9ed13-kube-api-access-7m6ld\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.313985 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/487cc8d4-cf82-4675-810c-7505a5a9ed13-config-volume\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.415014 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/487cc8d4-cf82-4675-810c-7505a5a9ed13-secret-volume\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.415132 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m6ld\" (UniqueName: \"kubernetes.io/projected/487cc8d4-cf82-4675-810c-7505a5a9ed13-kube-api-access-7m6ld\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.415195 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/487cc8d4-cf82-4675-810c-7505a5a9ed13-config-volume\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.416807 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/487cc8d4-cf82-4675-810c-7505a5a9ed13-config-volume\") pod 
\"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.425431 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/487cc8d4-cf82-4675-810c-7505a5a9ed13-secret-volume\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.446097 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m6ld\" (UniqueName: \"kubernetes.io/projected/487cc8d4-cf82-4675-810c-7505a5a9ed13-kube-api-access-7m6ld\") pod \"collect-profiles-29415330-dq74p\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.494391 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:00 crc kubenswrapper[4863]: I1205 07:30:00.906165 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p"] Dec 05 07:30:01 crc kubenswrapper[4863]: I1205 07:30:01.514393 4863 generic.go:334] "Generic (PLEG): container finished" podID="487cc8d4-cf82-4675-810c-7505a5a9ed13" containerID="22562f2697907e8928049cf9c8db8109a2ba4fe927a106fc74de99412ba4d663" exitCode=0 Dec 05 07:30:01 crc kubenswrapper[4863]: I1205 07:30:01.514506 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" event={"ID":"487cc8d4-cf82-4675-810c-7505a5a9ed13","Type":"ContainerDied","Data":"22562f2697907e8928049cf9c8db8109a2ba4fe927a106fc74de99412ba4d663"} Dec 05 07:30:01 crc kubenswrapper[4863]: I1205 07:30:01.514751 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" event={"ID":"487cc8d4-cf82-4675-810c-7505a5a9ed13","Type":"ContainerStarted","Data":"30e318681494213b8b524ed70e367fe2294b22a5563cf4f34529e2c6ee5c0ae8"} Dec 05 07:30:02 crc kubenswrapper[4863]: I1205 07:30:02.862397 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:02 crc kubenswrapper[4863]: I1205 07:30:02.970953 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/487cc8d4-cf82-4675-810c-7505a5a9ed13-secret-volume\") pod \"487cc8d4-cf82-4675-810c-7505a5a9ed13\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " Dec 05 07:30:02 crc kubenswrapper[4863]: I1205 07:30:02.971097 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/487cc8d4-cf82-4675-810c-7505a5a9ed13-config-volume\") pod \"487cc8d4-cf82-4675-810c-7505a5a9ed13\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " Dec 05 07:30:02 crc kubenswrapper[4863]: I1205 07:30:02.971167 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m6ld\" (UniqueName: \"kubernetes.io/projected/487cc8d4-cf82-4675-810c-7505a5a9ed13-kube-api-access-7m6ld\") pod \"487cc8d4-cf82-4675-810c-7505a5a9ed13\" (UID: \"487cc8d4-cf82-4675-810c-7505a5a9ed13\") " Dec 05 07:30:02 crc kubenswrapper[4863]: I1205 07:30:02.971851 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/487cc8d4-cf82-4675-810c-7505a5a9ed13-config-volume" (OuterVolumeSpecName: "config-volume") pod "487cc8d4-cf82-4675-810c-7505a5a9ed13" (UID: "487cc8d4-cf82-4675-810c-7505a5a9ed13"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:30:02 crc kubenswrapper[4863]: I1205 07:30:02.976095 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/487cc8d4-cf82-4675-810c-7505a5a9ed13-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "487cc8d4-cf82-4675-810c-7505a5a9ed13" (UID: "487cc8d4-cf82-4675-810c-7505a5a9ed13"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:30:02 crc kubenswrapper[4863]: I1205 07:30:02.976190 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/487cc8d4-cf82-4675-810c-7505a5a9ed13-kube-api-access-7m6ld" (OuterVolumeSpecName: "kube-api-access-7m6ld") pod "487cc8d4-cf82-4675-810c-7505a5a9ed13" (UID: "487cc8d4-cf82-4675-810c-7505a5a9ed13"). InnerVolumeSpecName "kube-api-access-7m6ld". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.073113 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/487cc8d4-cf82-4675-810c-7505a5a9ed13-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.073164 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m6ld\" (UniqueName: \"kubernetes.io/projected/487cc8d4-cf82-4675-810c-7505a5a9ed13-kube-api-access-7m6ld\") on node \"crc\" DevicePath \"\"" Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.073187 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/487cc8d4-cf82-4675-810c-7505a5a9ed13-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.534795 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" event={"ID":"487cc8d4-cf82-4675-810c-7505a5a9ed13","Type":"ContainerDied","Data":"30e318681494213b8b524ed70e367fe2294b22a5563cf4f34529e2c6ee5c0ae8"} Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.534837 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30e318681494213b8b524ed70e367fe2294b22a5563cf4f34529e2c6ee5c0ae8" Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.535024 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p" Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.957485 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn"] Dec 05 07:30:03 crc kubenswrapper[4863]: I1205 07:30:03.964395 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-phcfn"] Dec 05 07:30:04 crc kubenswrapper[4863]: I1205 07:30:04.614263 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7325423-fd50-4f8b-8a2f-a2e06eaaef6e" path="/var/lib/kubelet/pods/b7325423-fd50-4f8b-8a2f-a2e06eaaef6e/volumes" Dec 05 07:30:25 crc kubenswrapper[4863]: I1205 07:30:25.163379 4863 scope.go:117] "RemoveContainer" containerID="8f3abe5f3e43ce3cf2934e6678cd54617b0512f044e18a2e7923bbe209fd0853" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.455583 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nhv84"] Dec 05 07:30:50 crc kubenswrapper[4863]: E1205 07:30:50.456755 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="487cc8d4-cf82-4675-810c-7505a5a9ed13" containerName="collect-profiles" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.456798 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="487cc8d4-cf82-4675-810c-7505a5a9ed13" containerName="collect-profiles" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.457087 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="487cc8d4-cf82-4675-810c-7505a5a9ed13" containerName="collect-profiles" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.458400 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.475008 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nhv84"] Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.525785 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-catalog-content\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.526370 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-utilities\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.526610 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgfmt\" (UniqueName: \"kubernetes.io/projected/a0fb486e-118b-470e-9e64-76042ae60b9f-kube-api-access-xgfmt\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.628740 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-catalog-content\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.628798 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-utilities\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.628902 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgfmt\" (UniqueName: \"kubernetes.io/projected/a0fb486e-118b-470e-9e64-76042ae60b9f-kube-api-access-xgfmt\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.629222 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-catalog-content\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.629632 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-utilities\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.649073 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xgfmt\" (UniqueName: \"kubernetes.io/projected/a0fb486e-118b-470e-9e64-76042ae60b9f-kube-api-access-xgfmt\") pod \"certified-operators-nhv84\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:50 crc kubenswrapper[4863]: I1205 07:30:50.825646 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:30:51 crc kubenswrapper[4863]: I1205 07:30:51.257124 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nhv84"] Dec 05 07:30:52 crc kubenswrapper[4863]: I1205 07:30:52.027134 4863 generic.go:334] "Generic (PLEG): container finished" podID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerID="7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca" exitCode=0 Dec 05 07:30:52 crc kubenswrapper[4863]: I1205 07:30:52.027219 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhv84" event={"ID":"a0fb486e-118b-470e-9e64-76042ae60b9f","Type":"ContainerDied","Data":"7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca"} Dec 05 07:30:52 crc kubenswrapper[4863]: I1205 07:30:52.027282 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhv84" event={"ID":"a0fb486e-118b-470e-9e64-76042ae60b9f","Type":"ContainerStarted","Data":"b4bec6aa6577ec980f1830c6afe6d24e89751f66bb415ce5b297a23c9ac8ba8d"} Dec 05 07:30:52 crc kubenswrapper[4863]: I1205 07:30:52.034170 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:30:53 crc kubenswrapper[4863]: I1205 07:30:53.035555 4863 generic.go:334] "Generic (PLEG): container finished" podID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerID="1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc" exitCode=0 Dec 05 07:30:53 crc kubenswrapper[4863]: I1205 07:30:53.035671 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhv84" event={"ID":"a0fb486e-118b-470e-9e64-76042ae60b9f","Type":"ContainerDied","Data":"1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc"} Dec 05 07:30:54 crc kubenswrapper[4863]: I1205 07:30:54.050760 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhv84" event={"ID":"a0fb486e-118b-470e-9e64-76042ae60b9f","Type":"ContainerStarted","Data":"63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9"} Dec 05 07:30:54 crc kubenswrapper[4863]: I1205 07:30:54.071601 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nhv84" podStartSLOduration=2.594097801 podStartE2EDuration="4.07158006s" podCreationTimestamp="2025-12-05 07:30:50 +0000 UTC" firstStartedPulling="2025-12-05 07:30:52.033877082 +0000 UTC m=+2679.759874132" lastFinishedPulling="2025-12-05 07:30:53.511359311 +0000 UTC m=+2681.237356391" observedRunningTime="2025-12-05 07:30:54.069455818 +0000 UTC m=+2681.795452848" watchObservedRunningTime="2025-12-05 07:30:54.07158006 +0000 UTC m=+2681.797577110" Dec 05 07:31:00 crc kubenswrapper[4863]: I1205 07:31:00.826746 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:31:00 crc kubenswrapper[4863]: I1205 07:31:00.827222 4863 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:31:00 crc kubenswrapper[4863]: I1205 07:31:00.894356 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:31:01 crc kubenswrapper[4863]: I1205 07:31:01.206454 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:31:01 crc kubenswrapper[4863]: I1205 07:31:01.274807 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nhv84"] Dec 05 07:31:03 crc kubenswrapper[4863]: I1205 07:31:03.145740 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nhv84" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerName="registry-server" containerID="cri-o://63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9" gracePeriod=2 Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.104030 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.169159 4863 generic.go:334] "Generic (PLEG): container finished" podID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerID="63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9" exitCode=0 Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.169207 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhv84" event={"ID":"a0fb486e-118b-470e-9e64-76042ae60b9f","Type":"ContainerDied","Data":"63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9"} Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.169238 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhv84" event={"ID":"a0fb486e-118b-470e-9e64-76042ae60b9f","Type":"ContainerDied","Data":"b4bec6aa6577ec980f1830c6afe6d24e89751f66bb415ce5b297a23c9ac8ba8d"} Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.169254 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nhv84" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.169258 4863 scope.go:117] "RemoveContainer" containerID="63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.190020 4863 scope.go:117] "RemoveContainer" containerID="1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.209252 4863 scope.go:117] "RemoveContainer" containerID="7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.225643 4863 scope.go:117] "RemoveContainer" containerID="63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9" Dec 05 07:31:04 crc kubenswrapper[4863]: E1205 07:31:04.225954 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9\": container with ID starting with 63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9 not found: ID does not exist" containerID="63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.225985 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9"} err="failed to get container status \"63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9\": rpc error: code = NotFound desc = could not find container \"63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9\": container with ID starting with 63a5eb45ba8b7056f4c74c316d6ab7f1b6ed75dbaf8979b45363c5a794448cf9 not found: ID does not exist" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.226005 4863 scope.go:117] "RemoveContainer" containerID="1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc" Dec 05 07:31:04 crc kubenswrapper[4863]: E1205 07:31:04.226448 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc\": container with ID starting with 1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc not found: ID does not exist" containerID="1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.226486 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc"} err="failed to get container status \"1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc\": rpc error: code = NotFound desc = could not find container \"1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc\": container with ID starting with 1594f597b684d993f8647412616f04aa95ed2fd46b65cd7f0a6a486ecb0bc5dc not found: ID does not exist" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.226505 4863 scope.go:117] "RemoveContainer" containerID="7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca" Dec 05 07:31:04 crc kubenswrapper[4863]: E1205 07:31:04.226734 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca\": container with ID starting 
with 7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca not found: ID does not exist" containerID="7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.226758 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca"} err="failed to get container status \"7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca\": rpc error: code = NotFound desc = could not find container \"7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca\": container with ID starting with 7fbd5c2f61d799ee71a9fa408d88826494b21fcbb769d0504ce33f45ebd899ca not found: ID does not exist" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.245421 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-utilities\") pod \"a0fb486e-118b-470e-9e64-76042ae60b9f\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.245482 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgfmt\" (UniqueName: \"kubernetes.io/projected/a0fb486e-118b-470e-9e64-76042ae60b9f-kube-api-access-xgfmt\") pod \"a0fb486e-118b-470e-9e64-76042ae60b9f\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.245580 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-catalog-content\") pod \"a0fb486e-118b-470e-9e64-76042ae60b9f\" (UID: \"a0fb486e-118b-470e-9e64-76042ae60b9f\") " Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.246707 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-utilities" (OuterVolumeSpecName: "utilities") pod "a0fb486e-118b-470e-9e64-76042ae60b9f" (UID: "a0fb486e-118b-470e-9e64-76042ae60b9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.251789 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0fb486e-118b-470e-9e64-76042ae60b9f-kube-api-access-xgfmt" (OuterVolumeSpecName: "kube-api-access-xgfmt") pod "a0fb486e-118b-470e-9e64-76042ae60b9f" (UID: "a0fb486e-118b-470e-9e64-76042ae60b9f"). InnerVolumeSpecName "kube-api-access-xgfmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.293692 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0fb486e-118b-470e-9e64-76042ae60b9f" (UID: "a0fb486e-118b-470e-9e64-76042ae60b9f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.347847 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.347896 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0fb486e-118b-470e-9e64-76042ae60b9f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.347915 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgfmt\" (UniqueName: \"kubernetes.io/projected/a0fb486e-118b-470e-9e64-76042ae60b9f-kube-api-access-xgfmt\") on node \"crc\" DevicePath \"\"" Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.510546 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nhv84"] Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.526587 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nhv84"] Dec 05 07:31:04 crc kubenswrapper[4863]: I1205 07:31:04.611366 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" path="/var/lib/kubelet/pods/a0fb486e-118b-470e-9e64-76042ae60b9f/volumes" Dec 05 07:31:08 crc kubenswrapper[4863]: I1205 07:31:08.464104 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:31:08 crc kubenswrapper[4863]: I1205 07:31:08.466107 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.729963 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tg7wp"] Dec 05 07:31:25 crc kubenswrapper[4863]: E1205 07:31:25.731202 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerName="extract-content" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.731227 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerName="extract-content" Dec 05 07:31:25 crc kubenswrapper[4863]: E1205 07:31:25.731256 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerName="extract-utilities" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.731270 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerName="extract-utilities" Dec 05 07:31:25 crc kubenswrapper[4863]: E1205 07:31:25.731292 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerName="registry-server" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.731305 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" 
containerName="registry-server" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.731615 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0fb486e-118b-470e-9e64-76042ae60b9f" containerName="registry-server" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.733565 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.744196 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tg7wp"] Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.880519 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-catalog-content\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.880613 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fdpp\" (UniqueName: \"kubernetes.io/projected/35189f44-767b-45d6-a71e-d3631d7cd102-kube-api-access-7fdpp\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.880674 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-utilities\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.982258 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-catalog-content\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.982352 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fdpp\" (UniqueName: \"kubernetes.io/projected/35189f44-767b-45d6-a71e-d3631d7cd102-kube-api-access-7fdpp\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.982407 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-utilities\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.982922 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-catalog-content\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:25 crc kubenswrapper[4863]: I1205 07:31:25.982986 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-utilities\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:26 crc kubenswrapper[4863]: I1205 07:31:26.015547 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fdpp\" (UniqueName: \"kubernetes.io/projected/35189f44-767b-45d6-a71e-d3631d7cd102-kube-api-access-7fdpp\") pod \"redhat-marketplace-tg7wp\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:26 crc kubenswrapper[4863]: I1205 07:31:26.096502 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:26 crc kubenswrapper[4863]: I1205 07:31:26.532043 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tg7wp"] Dec 05 07:31:27 crc kubenswrapper[4863]: I1205 07:31:27.415313 4863 generic.go:334] "Generic (PLEG): container finished" podID="35189f44-767b-45d6-a71e-d3631d7cd102" containerID="bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52" exitCode=0 Dec 05 07:31:27 crc kubenswrapper[4863]: I1205 07:31:27.415372 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tg7wp" event={"ID":"35189f44-767b-45d6-a71e-d3631d7cd102","Type":"ContainerDied","Data":"bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52"} Dec 05 07:31:27 crc kubenswrapper[4863]: I1205 07:31:27.415414 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tg7wp" event={"ID":"35189f44-767b-45d6-a71e-d3631d7cd102","Type":"ContainerStarted","Data":"7add9fd6c33a2dc2c75883e616216d3e5084c96be969f413c421904e5565a362"} Dec 05 07:31:28 crc kubenswrapper[4863]: I1205 07:31:28.432731 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tg7wp" event={"ID":"35189f44-767b-45d6-a71e-d3631d7cd102","Type":"ContainerStarted","Data":"6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215"} Dec 05 07:31:29 crc kubenswrapper[4863]: I1205 07:31:29.451252 4863 generic.go:334] "Generic (PLEG): container finished" podID="35189f44-767b-45d6-a71e-d3631d7cd102" containerID="6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215" exitCode=0 Dec 05 07:31:29 crc kubenswrapper[4863]: I1205 07:31:29.451322 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tg7wp" event={"ID":"35189f44-767b-45d6-a71e-d3631d7cd102","Type":"ContainerDied","Data":"6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215"} Dec 05 07:31:30 crc kubenswrapper[4863]: I1205 07:31:30.461329 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tg7wp" event={"ID":"35189f44-767b-45d6-a71e-d3631d7cd102","Type":"ContainerStarted","Data":"ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235"} Dec 05 07:31:30 crc kubenswrapper[4863]: I1205 07:31:30.488909 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tg7wp" podStartSLOduration=3.042787847 podStartE2EDuration="5.488881638s" podCreationTimestamp="2025-12-05 07:31:25 +0000 UTC" firstStartedPulling="2025-12-05 07:31:27.417443809 +0000 UTC m=+2715.143440849" lastFinishedPulling="2025-12-05 
07:31:29.8635376 +0000 UTC m=+2717.589534640" observedRunningTime="2025-12-05 07:31:30.477608013 +0000 UTC m=+2718.203605083" watchObservedRunningTime="2025-12-05 07:31:30.488881638 +0000 UTC m=+2718.214878708" Dec 05 07:31:36 crc kubenswrapper[4863]: I1205 07:31:36.097431 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:36 crc kubenswrapper[4863]: I1205 07:31:36.098761 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:36 crc kubenswrapper[4863]: I1205 07:31:36.180071 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:36 crc kubenswrapper[4863]: I1205 07:31:36.581818 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:36 crc kubenswrapper[4863]: I1205 07:31:36.642062 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tg7wp"] Dec 05 07:31:38 crc kubenswrapper[4863]: I1205 07:31:38.464329 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:31:38 crc kubenswrapper[4863]: I1205 07:31:38.464879 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:31:38 crc kubenswrapper[4863]: I1205 07:31:38.524778 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tg7wp" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="registry-server" containerID="cri-o://ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235" gracePeriod=2 Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.478261 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.551003 4863 generic.go:334] "Generic (PLEG): container finished" podID="35189f44-767b-45d6-a71e-d3631d7cd102" containerID="ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235" exitCode=0 Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.551051 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tg7wp" event={"ID":"35189f44-767b-45d6-a71e-d3631d7cd102","Type":"ContainerDied","Data":"ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235"} Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.551073 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tg7wp" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.551610 4863 scope.go:117] "RemoveContainer" containerID="ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.551945 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tg7wp" event={"ID":"35189f44-767b-45d6-a71e-d3631d7cd102","Type":"ContainerDied","Data":"7add9fd6c33a2dc2c75883e616216d3e5084c96be969f413c421904e5565a362"} Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.578160 4863 scope.go:117] "RemoveContainer" containerID="6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.606119 4863 scope.go:117] "RemoveContainer" containerID="bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.643571 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fdpp\" (UniqueName: \"kubernetes.io/projected/35189f44-767b-45d6-a71e-d3631d7cd102-kube-api-access-7fdpp\") pod \"35189f44-767b-45d6-a71e-d3631d7cd102\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.643623 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-catalog-content\") pod \"35189f44-767b-45d6-a71e-d3631d7cd102\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.643706 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-utilities\") pod \"35189f44-767b-45d6-a71e-d3631d7cd102\" (UID: \"35189f44-767b-45d6-a71e-d3631d7cd102\") " Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.644899 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-utilities" (OuterVolumeSpecName: "utilities") pod "35189f44-767b-45d6-a71e-d3631d7cd102" (UID: "35189f44-767b-45d6-a71e-d3631d7cd102"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.652852 4863 scope.go:117] "RemoveContainer" containerID="ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.652862 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35189f44-767b-45d6-a71e-d3631d7cd102-kube-api-access-7fdpp" (OuterVolumeSpecName: "kube-api-access-7fdpp") pod "35189f44-767b-45d6-a71e-d3631d7cd102" (UID: "35189f44-767b-45d6-a71e-d3631d7cd102"). InnerVolumeSpecName "kube-api-access-7fdpp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:31:39 crc kubenswrapper[4863]: E1205 07:31:39.653394 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235\": container with ID starting with ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235 not found: ID does not exist" containerID="ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.653431 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235"} err="failed to get container status \"ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235\": rpc error: code = NotFound desc = could not find container \"ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235\": container with ID starting with ad0e7ece662e0d45a6a6bcd692a924e6d95ff37e04572b649673af2aa2d5f235 not found: ID does not exist" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.653458 4863 scope.go:117] "RemoveContainer" containerID="6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215" Dec 05 07:31:39 crc kubenswrapper[4863]: E1205 07:31:39.654121 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215\": container with ID starting with 6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215 not found: ID does not exist" containerID="6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.654158 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215"} err="failed to get container status \"6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215\": rpc error: code = NotFound desc = could not find container \"6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215\": container with ID starting with 6f98ca8ddb4dce33eb91bcd103f19106169609ba816b820f2be0baff6aa7a215 not found: ID does not exist" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.654179 4863 scope.go:117] "RemoveContainer" containerID="bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52" Dec 05 07:31:39 crc kubenswrapper[4863]: E1205 07:31:39.654706 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52\": container with ID starting with bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52 not found: ID does not exist" containerID="bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.654736 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52"} err="failed to get container status \"bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52\": rpc error: code = NotFound desc = could not find container \"bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52\": container with ID starting with 
bfb854674a6b52885e5a9d1fe851f2b7135ca452e51372a50c08451935128a52 not found: ID does not exist" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.665329 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35189f44-767b-45d6-a71e-d3631d7cd102" (UID: "35189f44-767b-45d6-a71e-d3631d7cd102"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.745495 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.745740 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fdpp\" (UniqueName: \"kubernetes.io/projected/35189f44-767b-45d6-a71e-d3631d7cd102-kube-api-access-7fdpp\") on node \"crc\" DevicePath \"\"" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.745853 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35189f44-767b-45d6-a71e-d3631d7cd102-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.898310 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tg7wp"] Dec 05 07:31:39 crc kubenswrapper[4863]: I1205 07:31:39.907370 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tg7wp"] Dec 05 07:31:40 crc kubenswrapper[4863]: I1205 07:31:40.617357 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" path="/var/lib/kubelet/pods/35189f44-767b-45d6-a71e-d3631d7cd102/volumes" Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.465244 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.465910 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.465986 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.466911 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bb86e10827cff69304a935256df119a57d85223194633210492265976e626f5c"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.466996 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://bb86e10827cff69304a935256df119a57d85223194633210492265976e626f5c" gracePeriod=600 Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.800233 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="bb86e10827cff69304a935256df119a57d85223194633210492265976e626f5c" exitCode=0 Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.800747 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"bb86e10827cff69304a935256df119a57d85223194633210492265976e626f5c"} Dec 05 07:32:08 crc kubenswrapper[4863]: I1205 07:32:08.801697 4863 scope.go:117] "RemoveContainer" containerID="7c9a77ecf01efda3679fb191e7a54831db5295efaf80abb8b3d7c871b7130678" Dec 05 07:32:09 crc kubenswrapper[4863]: I1205 07:32:09.812667 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360"} Dec 05 07:34:08 crc kubenswrapper[4863]: I1205 07:34:08.464847 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:34:08 crc kubenswrapper[4863]: I1205 07:34:08.465425 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:34:38 crc kubenswrapper[4863]: I1205 07:34:38.464182 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:34:38 crc kubenswrapper[4863]: I1205 07:34:38.465034 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:35:08 crc kubenswrapper[4863]: I1205 07:35:08.464665 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:35:08 crc kubenswrapper[4863]: I1205 07:35:08.465426 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:35:08 
crc kubenswrapper[4863]: I1205 07:35:08.465531 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:35:08 crc kubenswrapper[4863]: I1205 07:35:08.466350 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:35:08 crc kubenswrapper[4863]: I1205 07:35:08.466456 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" gracePeriod=600 Dec 05 07:35:08 crc kubenswrapper[4863]: E1205 07:35:08.607379 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:35:09 crc kubenswrapper[4863]: I1205 07:35:09.423239 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" exitCode=0 Dec 05 07:35:09 crc kubenswrapper[4863]: I1205 07:35:09.423789 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360"} Dec 05 07:35:09 crc kubenswrapper[4863]: I1205 07:35:09.424003 4863 scope.go:117] "RemoveContainer" containerID="bb86e10827cff69304a935256df119a57d85223194633210492265976e626f5c" Dec 05 07:35:09 crc kubenswrapper[4863]: I1205 07:35:09.424920 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:35:09 crc kubenswrapper[4863]: E1205 07:35:09.425352 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:35:20 crc kubenswrapper[4863]: I1205 07:35:20.602129 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:35:20 crc kubenswrapper[4863]: E1205 07:35:20.603390 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:35:31 crc kubenswrapper[4863]: I1205 07:35:31.602173 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:35:31 crc kubenswrapper[4863]: E1205 07:35:31.605155 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:35:43 crc kubenswrapper[4863]: I1205 07:35:43.602044 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:35:43 crc kubenswrapper[4863]: E1205 07:35:43.602949 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:35:57 crc kubenswrapper[4863]: I1205 07:35:57.602951 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:35:57 crc kubenswrapper[4863]: E1205 07:35:57.604036 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:36:12 crc kubenswrapper[4863]: I1205 07:36:12.614426 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:36:12 crc kubenswrapper[4863]: E1205 07:36:12.615435 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:36:27 crc kubenswrapper[4863]: I1205 07:36:27.601974 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:36:27 crc kubenswrapper[4863]: E1205 07:36:27.602978 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:36:38 crc kubenswrapper[4863]: I1205 07:36:38.602934 4863 
scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:36:38 crc kubenswrapper[4863]: E1205 07:36:38.604274 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:36:49 crc kubenswrapper[4863]: I1205 07:36:49.602878 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:36:49 crc kubenswrapper[4863]: E1205 07:36:49.604090 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:37:01 crc kubenswrapper[4863]: I1205 07:37:01.601505 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:37:01 crc kubenswrapper[4863]: E1205 07:37:01.602456 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:37:16 crc kubenswrapper[4863]: I1205 07:37:16.601936 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:37:16 crc kubenswrapper[4863]: E1205 07:37:16.604505 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:37:28 crc kubenswrapper[4863]: I1205 07:37:28.602779 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:37:28 crc kubenswrapper[4863]: E1205 07:37:28.604144 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:37:43 crc kubenswrapper[4863]: I1205 07:37:43.602713 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:37:43 crc kubenswrapper[4863]: E1205 07:37:43.603687 4863 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.020556 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4b4mm"] Dec 05 07:37:55 crc kubenswrapper[4863]: E1205 07:37:55.021622 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="extract-utilities" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.021643 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="extract-utilities" Dec 05 07:37:55 crc kubenswrapper[4863]: E1205 07:37:55.021685 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="registry-server" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.021697 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="registry-server" Dec 05 07:37:55 crc kubenswrapper[4863]: E1205 07:37:55.021709 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="extract-content" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.021723 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="extract-content" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.021969 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="35189f44-767b-45d6-a71e-d3631d7cd102" containerName="registry-server" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.023445 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.045818 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4b4mm"] Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.145127 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8cbm\" (UniqueName: \"kubernetes.io/projected/60606549-d6e2-4fcd-98be-cb5ff7760f12-kube-api-access-f8cbm\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.145222 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-catalog-content\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.145319 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-utilities\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.246763 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-catalog-content\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.246885 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-utilities\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.246941 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8cbm\" (UniqueName: \"kubernetes.io/projected/60606549-d6e2-4fcd-98be-cb5ff7760f12-kube-api-access-f8cbm\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.247860 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-catalog-content\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.248223 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-utilities\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.279882 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f8cbm\" (UniqueName: \"kubernetes.io/projected/60606549-d6e2-4fcd-98be-cb5ff7760f12-kube-api-access-f8cbm\") pod \"community-operators-4b4mm\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.349791 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.602287 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:37:55 crc kubenswrapper[4863]: E1205 07:37:55.602539 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.603747 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4b4mm"] Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.924330 4863 generic.go:334] "Generic (PLEG): container finished" podID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerID="f505a82e94b25fcbfec8a1a093517ef50c86ae93e8a672da550521d00b459974" exitCode=0 Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.924374 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4mm" event={"ID":"60606549-d6e2-4fcd-98be-cb5ff7760f12","Type":"ContainerDied","Data":"f505a82e94b25fcbfec8a1a093517ef50c86ae93e8a672da550521d00b459974"} Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.924609 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4mm" event={"ID":"60606549-d6e2-4fcd-98be-cb5ff7760f12","Type":"ContainerStarted","Data":"4ec4454cc983a37406a183fa76e9416374e01b2cfee1383547155ad4a5efd2e4"} Dec 05 07:37:55 crc kubenswrapper[4863]: I1205 07:37:55.925861 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.014798 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bnk8n"] Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.016976 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.030424 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bnk8n"] Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.196545 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b95h2\" (UniqueName: \"kubernetes.io/projected/1847c2d9-5acc-4856-aeb4-3137e3f781b6-kube-api-access-b95h2\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.196637 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-catalog-content\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.196890 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-utilities\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.298244 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-utilities\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.298355 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b95h2\" (UniqueName: \"kubernetes.io/projected/1847c2d9-5acc-4856-aeb4-3137e3f781b6-kube-api-access-b95h2\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.298384 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-catalog-content\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.298941 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-catalog-content\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.299397 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-utilities\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.326368 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-b95h2\" (UniqueName: \"kubernetes.io/projected/1847c2d9-5acc-4856-aeb4-3137e3f781b6-kube-api-access-b95h2\") pod \"redhat-operators-bnk8n\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:37:58 crc kubenswrapper[4863]: I1205 07:37:58.339158 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:38:00 crc kubenswrapper[4863]: W1205 07:38:00.170003 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1847c2d9_5acc_4856_aeb4_3137e3f781b6.slice/crio-aa58af07d1896a747b04214f8b4cdf455e8d9dd55924ff6cb9b232617645f7db WatchSource:0}: Error finding container aa58af07d1896a747b04214f8b4cdf455e8d9dd55924ff6cb9b232617645f7db: Status 404 returned error can't find the container with id aa58af07d1896a747b04214f8b4cdf455e8d9dd55924ff6cb9b232617645f7db Dec 05 07:38:00 crc kubenswrapper[4863]: I1205 07:38:00.181073 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bnk8n"] Dec 05 07:38:00 crc kubenswrapper[4863]: I1205 07:38:00.966675 4863 generic.go:334] "Generic (PLEG): container finished" podID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerID="1e512f05e5ca9ae327a69e26c2f120edabb59bd5c9e247b2f6a3a56c1a107800" exitCode=0 Dec 05 07:38:00 crc kubenswrapper[4863]: I1205 07:38:00.967135 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bnk8n" event={"ID":"1847c2d9-5acc-4856-aeb4-3137e3f781b6","Type":"ContainerDied","Data":"1e512f05e5ca9ae327a69e26c2f120edabb59bd5c9e247b2f6a3a56c1a107800"} Dec 05 07:38:00 crc kubenswrapper[4863]: I1205 07:38:00.967208 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bnk8n" event={"ID":"1847c2d9-5acc-4856-aeb4-3137e3f781b6","Type":"ContainerStarted","Data":"aa58af07d1896a747b04214f8b4cdf455e8d9dd55924ff6cb9b232617645f7db"} Dec 05 07:38:00 crc kubenswrapper[4863]: I1205 07:38:00.969655 4863 generic.go:334] "Generic (PLEG): container finished" podID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerID="c39623f5979967ad92ef826c27d7d12712f02456319390d0dd94cbbadcfa2562" exitCode=0 Dec 05 07:38:00 crc kubenswrapper[4863]: I1205 07:38:00.969697 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4mm" event={"ID":"60606549-d6e2-4fcd-98be-cb5ff7760f12","Type":"ContainerDied","Data":"c39623f5979967ad92ef826c27d7d12712f02456319390d0dd94cbbadcfa2562"} Dec 05 07:38:01 crc kubenswrapper[4863]: I1205 07:38:01.980352 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bnk8n" event={"ID":"1847c2d9-5acc-4856-aeb4-3137e3f781b6","Type":"ContainerStarted","Data":"f41040028da9297291e4bc0b73c0cff8e9dde3c424a7dbfe47794dad2f37f30a"} Dec 05 07:38:01 crc kubenswrapper[4863]: I1205 07:38:01.984023 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4mm" event={"ID":"60606549-d6e2-4fcd-98be-cb5ff7760f12","Type":"ContainerStarted","Data":"b99887a54c58e691df1f103769e7e0dc4e4ff3a7d6ef383b1b39afa71ca651fd"} Dec 05 07:38:02 crc kubenswrapper[4863]: I1205 07:38:02.044543 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4b4mm" podStartSLOduration=2.55553039 podStartE2EDuration="8.044528243s" 
podCreationTimestamp="2025-12-05 07:37:54 +0000 UTC" firstStartedPulling="2025-12-05 07:37:55.92560714 +0000 UTC m=+3103.651604170" lastFinishedPulling="2025-12-05 07:38:01.414604953 +0000 UTC m=+3109.140602023" observedRunningTime="2025-12-05 07:38:02.043029426 +0000 UTC m=+3109.769026486" watchObservedRunningTime="2025-12-05 07:38:02.044528243 +0000 UTC m=+3109.770525283" Dec 05 07:38:02 crc kubenswrapper[4863]: I1205 07:38:02.991919 4863 generic.go:334] "Generic (PLEG): container finished" podID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerID="f41040028da9297291e4bc0b73c0cff8e9dde3c424a7dbfe47794dad2f37f30a" exitCode=0 Dec 05 07:38:02 crc kubenswrapper[4863]: I1205 07:38:02.991955 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bnk8n" event={"ID":"1847c2d9-5acc-4856-aeb4-3137e3f781b6","Type":"ContainerDied","Data":"f41040028da9297291e4bc0b73c0cff8e9dde3c424a7dbfe47794dad2f37f30a"} Dec 05 07:38:03 crc kubenswrapper[4863]: I1205 07:38:03.999508 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bnk8n" event={"ID":"1847c2d9-5acc-4856-aeb4-3137e3f781b6","Type":"ContainerStarted","Data":"e4574fe6753b0f779250a7b43a48d7edfea174f4912aab557f3c7d39fd37b733"} Dec 05 07:38:04 crc kubenswrapper[4863]: I1205 07:38:04.021633 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bnk8n" podStartSLOduration=4.578425176 podStartE2EDuration="7.021615066s" podCreationTimestamp="2025-12-05 07:37:57 +0000 UTC" firstStartedPulling="2025-12-05 07:38:00.970566935 +0000 UTC m=+3108.696564015" lastFinishedPulling="2025-12-05 07:38:03.413756855 +0000 UTC m=+3111.139753905" observedRunningTime="2025-12-05 07:38:04.018772396 +0000 UTC m=+3111.744769456" watchObservedRunningTime="2025-12-05 07:38:04.021615066 +0000 UTC m=+3111.747612106" Dec 05 07:38:05 crc kubenswrapper[4863]: I1205 07:38:05.350579 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:38:05 crc kubenswrapper[4863]: I1205 07:38:05.350901 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:38:05 crc kubenswrapper[4863]: I1205 07:38:05.407581 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:38:06 crc kubenswrapper[4863]: I1205 07:38:06.068013 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 07:38:07 crc kubenswrapper[4863]: I1205 07:38:07.020404 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4b4mm"] Dec 05 07:38:07 crc kubenswrapper[4863]: I1205 07:38:07.211202 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9p9x6"] Dec 05 07:38:07 crc kubenswrapper[4863]: I1205 07:38:07.211505 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9p9x6" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="registry-server" containerID="cri-o://41a188b632d2ee10101dc5ded6e8c77b9192b92844cadb48a8eb89e24d709486" gracePeriod=2 Dec 05 07:38:08 crc kubenswrapper[4863]: I1205 07:38:08.340144 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:38:08 crc kubenswrapper[4863]: I1205 07:38:08.340229 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.042109 4863 generic.go:334] "Generic (PLEG): container finished" podID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerID="41a188b632d2ee10101dc5ded6e8c77b9192b92844cadb48a8eb89e24d709486" exitCode=0 Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.042196 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9p9x6" event={"ID":"9b8f6f97-ba75-4706-aaef-cbaf00e0b338","Type":"ContainerDied","Data":"41a188b632d2ee10101dc5ded6e8c77b9192b92844cadb48a8eb89e24d709486"} Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.429980 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bnk8n" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="registry-server" probeResult="failure" output=< Dec 05 07:38:09 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 07:38:09 crc kubenswrapper[4863]: > Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.556984 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.667825 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bb7b\" (UniqueName: \"kubernetes.io/projected/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-kube-api-access-4bb7b\") pod \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.667926 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-utilities\") pod \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.668057 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-catalog-content\") pod \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\" (UID: \"9b8f6f97-ba75-4706-aaef-cbaf00e0b338\") " Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.672716 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-utilities" (OuterVolumeSpecName: "utilities") pod "9b8f6f97-ba75-4706-aaef-cbaf00e0b338" (UID: "9b8f6f97-ba75-4706-aaef-cbaf00e0b338"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.674096 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-kube-api-access-4bb7b" (OuterVolumeSpecName: "kube-api-access-4bb7b") pod "9b8f6f97-ba75-4706-aaef-cbaf00e0b338" (UID: "9b8f6f97-ba75-4706-aaef-cbaf00e0b338"). InnerVolumeSpecName "kube-api-access-4bb7b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.717214 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b8f6f97-ba75-4706-aaef-cbaf00e0b338" (UID: "9b8f6f97-ba75-4706-aaef-cbaf00e0b338"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.773956 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.774515 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:38:09 crc kubenswrapper[4863]: I1205 07:38:09.774594 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bb7b\" (UniqueName: \"kubernetes.io/projected/9b8f6f97-ba75-4706-aaef-cbaf00e0b338-kube-api-access-4bb7b\") on node \"crc\" DevicePath \"\"" Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.055806 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9p9x6" event={"ID":"9b8f6f97-ba75-4706-aaef-cbaf00e0b338","Type":"ContainerDied","Data":"be5329e8720f357007b80b61d22a578d02dcb1ebab26a22366ca3d7dcd5463a0"} Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.055871 4863 scope.go:117] "RemoveContainer" containerID="41a188b632d2ee10101dc5ded6e8c77b9192b92844cadb48a8eb89e24d709486" Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.055932 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9p9x6" Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.098946 4863 scope.go:117] "RemoveContainer" containerID="827ab0b477d92a39b9d2d819a8535b0efa44fe45bb27c198d734bcbffe178ad0" Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.111955 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9p9x6"] Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.125641 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9p9x6"] Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.136017 4863 scope.go:117] "RemoveContainer" containerID="4a5d4262e13038176f3a0d29e7ad6deaede03014ee79e490312bd26825517428" Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.606203 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:38:10 crc kubenswrapper[4863]: E1205 07:38:10.607738 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:38:10 crc kubenswrapper[4863]: I1205 07:38:10.623613 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" path="/var/lib/kubelet/pods/9b8f6f97-ba75-4706-aaef-cbaf00e0b338/volumes" Dec 05 07:38:18 crc kubenswrapper[4863]: I1205 07:38:18.414856 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:38:18 crc kubenswrapper[4863]: I1205 07:38:18.505693 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:38:19 crc kubenswrapper[4863]: I1205 07:38:19.661878 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bnk8n"] Dec 05 07:38:20 crc kubenswrapper[4863]: I1205 07:38:20.158418 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bnk8n" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="registry-server" containerID="cri-o://e4574fe6753b0f779250a7b43a48d7edfea174f4912aab557f3c7d39fd37b733" gracePeriod=2 Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.178065 4863 generic.go:334] "Generic (PLEG): container finished" podID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerID="e4574fe6753b0f779250a7b43a48d7edfea174f4912aab557f3c7d39fd37b733" exitCode=0 Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.178227 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bnk8n" event={"ID":"1847c2d9-5acc-4856-aeb4-3137e3f781b6","Type":"ContainerDied","Data":"e4574fe6753b0f779250a7b43a48d7edfea174f4912aab557f3c7d39fd37b733"} Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.471684 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.588610 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-catalog-content\") pod \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.588764 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-utilities\") pod \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.588895 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b95h2\" (UniqueName: \"kubernetes.io/projected/1847c2d9-5acc-4856-aeb4-3137e3f781b6-kube-api-access-b95h2\") pod \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\" (UID: \"1847c2d9-5acc-4856-aeb4-3137e3f781b6\") " Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.590266 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-utilities" (OuterVolumeSpecName: "utilities") pod "1847c2d9-5acc-4856-aeb4-3137e3f781b6" (UID: "1847c2d9-5acc-4856-aeb4-3137e3f781b6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.593674 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1847c2d9-5acc-4856-aeb4-3137e3f781b6-kube-api-access-b95h2" (OuterVolumeSpecName: "kube-api-access-b95h2") pod "1847c2d9-5acc-4856-aeb4-3137e3f781b6" (UID: "1847c2d9-5acc-4856-aeb4-3137e3f781b6"). InnerVolumeSpecName "kube-api-access-b95h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.690792 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.690830 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b95h2\" (UniqueName: \"kubernetes.io/projected/1847c2d9-5acc-4856-aeb4-3137e3f781b6-kube-api-access-b95h2\") on node \"crc\" DevicePath \"\"" Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.734926 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1847c2d9-5acc-4856-aeb4-3137e3f781b6" (UID: "1847c2d9-5acc-4856-aeb4-3137e3f781b6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:38:22 crc kubenswrapper[4863]: I1205 07:38:22.792504 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1847c2d9-5acc-4856-aeb4-3137e3f781b6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:38:23 crc kubenswrapper[4863]: I1205 07:38:23.190354 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bnk8n" event={"ID":"1847c2d9-5acc-4856-aeb4-3137e3f781b6","Type":"ContainerDied","Data":"aa58af07d1896a747b04214f8b4cdf455e8d9dd55924ff6cb9b232617645f7db"} Dec 05 07:38:23 crc kubenswrapper[4863]: I1205 07:38:23.190439 4863 scope.go:117] "RemoveContainer" containerID="e4574fe6753b0f779250a7b43a48d7edfea174f4912aab557f3c7d39fd37b733" Dec 05 07:38:23 crc kubenswrapper[4863]: I1205 07:38:23.190450 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bnk8n" Dec 05 07:38:23 crc kubenswrapper[4863]: I1205 07:38:23.211988 4863 scope.go:117] "RemoveContainer" containerID="f41040028da9297291e4bc0b73c0cff8e9dde3c424a7dbfe47794dad2f37f30a" Dec 05 07:38:23 crc kubenswrapper[4863]: I1205 07:38:23.246800 4863 scope.go:117] "RemoveContainer" containerID="1e512f05e5ca9ae327a69e26c2f120edabb59bd5c9e247b2f6a3a56c1a107800" Dec 05 07:38:23 crc kubenswrapper[4863]: I1205 07:38:23.247741 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bnk8n"] Dec 05 07:38:23 crc kubenswrapper[4863]: I1205 07:38:23.256919 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bnk8n"] Dec 05 07:38:24 crc kubenswrapper[4863]: I1205 07:38:24.618605 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" path="/var/lib/kubelet/pods/1847c2d9-5acc-4856-aeb4-3137e3f781b6/volumes" Dec 05 07:38:25 crc kubenswrapper[4863]: I1205 07:38:25.601912 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:38:25 crc kubenswrapper[4863]: E1205 07:38:25.602540 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:38:39 crc kubenswrapper[4863]: I1205 07:38:39.601632 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:38:39 crc kubenswrapper[4863]: E1205 07:38:39.602695 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:38:52 crc kubenswrapper[4863]: I1205 07:38:52.607389 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:38:52 crc kubenswrapper[4863]: E1205 07:38:52.609466 
4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:39:05 crc kubenswrapper[4863]: I1205 07:39:05.602583 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:39:05 crc kubenswrapper[4863]: E1205 07:39:05.603511 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:39:16 crc kubenswrapper[4863]: I1205 07:39:16.603083 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:39:16 crc kubenswrapper[4863]: E1205 07:39:16.604256 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:39:29 crc kubenswrapper[4863]: I1205 07:39:29.601981 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:39:29 crc kubenswrapper[4863]: E1205 07:39:29.602629 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:39:43 crc kubenswrapper[4863]: I1205 07:39:43.602176 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:39:43 crc kubenswrapper[4863]: E1205 07:39:43.603039 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:39:56 crc kubenswrapper[4863]: I1205 07:39:56.602997 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:39:56 crc kubenswrapper[4863]: E1205 07:39:56.604399 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:40:07 crc kubenswrapper[4863]: I1205 07:40:07.602419 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:40:07 crc kubenswrapper[4863]: E1205 07:40:07.603549 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:40:20 crc kubenswrapper[4863]: I1205 07:40:20.602405 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:40:21 crc kubenswrapper[4863]: I1205 07:40:21.336720 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"c3e01ec6c90887a3b71c4ddcb09cb71ffd0cfa6bbd5df2df86909ba7bb3fcba3"} Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.255883 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nwwsn"] Dec 05 07:41:04 crc kubenswrapper[4863]: E1205 07:41:04.256804 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="registry-server" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.256820 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="registry-server" Dec 05 07:41:04 crc kubenswrapper[4863]: E1205 07:41:04.256840 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="extract-content" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.256849 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="extract-content" Dec 05 07:41:04 crc kubenswrapper[4863]: E1205 07:41:04.256885 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="registry-server" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.256897 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="registry-server" Dec 05 07:41:04 crc kubenswrapper[4863]: E1205 07:41:04.256911 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="extract-utilities" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.256922 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="extract-utilities" Dec 05 07:41:04 crc kubenswrapper[4863]: E1205 07:41:04.256932 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="extract-utilities" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.256941 4863 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="extract-utilities" Dec 05 07:41:04 crc kubenswrapper[4863]: E1205 07:41:04.256959 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="extract-content" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.256967 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="extract-content" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.257153 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1847c2d9-5acc-4856-aeb4-3137e3f781b6" containerName="registry-server" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.257182 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b8f6f97-ba75-4706-aaef-cbaf00e0b338" containerName="registry-server" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.259150 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.320283 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwwsn"] Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.350822 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lw2m\" (UniqueName: \"kubernetes.io/projected/73e050bf-3a21-407e-a79b-627bbcc0ddfd-kube-api-access-2lw2m\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.350924 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-utilities\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.350981 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-catalog-content\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.452423 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-catalog-content\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.452577 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lw2m\" (UniqueName: \"kubernetes.io/projected/73e050bf-3a21-407e-a79b-627bbcc0ddfd-kube-api-access-2lw2m\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.452620 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-utilities\") pod 
\"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.453134 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-utilities\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.453515 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-catalog-content\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.472227 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lw2m\" (UniqueName: \"kubernetes.io/projected/73e050bf-3a21-407e-a79b-627bbcc0ddfd-kube-api-access-2lw2m\") pod \"certified-operators-nwwsn\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:04 crc kubenswrapper[4863]: I1205 07:41:04.580889 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:05 crc kubenswrapper[4863]: I1205 07:41:05.034773 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwwsn"] Dec 05 07:41:05 crc kubenswrapper[4863]: I1205 07:41:05.765260 4863 generic.go:334] "Generic (PLEG): container finished" podID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerID="a54b919aafe49a494b9d07e25faee0fb85b99f0a1d934c29e3d56bfc22aebdd7" exitCode=0 Dec 05 07:41:05 crc kubenswrapper[4863]: I1205 07:41:05.765327 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwwsn" event={"ID":"73e050bf-3a21-407e-a79b-627bbcc0ddfd","Type":"ContainerDied","Data":"a54b919aafe49a494b9d07e25faee0fb85b99f0a1d934c29e3d56bfc22aebdd7"} Dec 05 07:41:05 crc kubenswrapper[4863]: I1205 07:41:05.765370 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwwsn" event={"ID":"73e050bf-3a21-407e-a79b-627bbcc0ddfd","Type":"ContainerStarted","Data":"47feae6984d2173720337892f7b59cd8a6f3d6f8a6b1d4c21ccc7f9959496185"} Dec 05 07:41:06 crc kubenswrapper[4863]: I1205 07:41:06.777028 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwwsn" event={"ID":"73e050bf-3a21-407e-a79b-627bbcc0ddfd","Type":"ContainerStarted","Data":"bb3360c9fc6f1015c352644cdf1d11dbb6fc203a6278973401364979c68c1a01"} Dec 05 07:41:07 crc kubenswrapper[4863]: I1205 07:41:07.796178 4863 generic.go:334] "Generic (PLEG): container finished" podID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerID="bb3360c9fc6f1015c352644cdf1d11dbb6fc203a6278973401364979c68c1a01" exitCode=0 Dec 05 07:41:07 crc kubenswrapper[4863]: I1205 07:41:07.796237 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwwsn" event={"ID":"73e050bf-3a21-407e-a79b-627bbcc0ddfd","Type":"ContainerDied","Data":"bb3360c9fc6f1015c352644cdf1d11dbb6fc203a6278973401364979c68c1a01"} Dec 05 07:41:08 crc kubenswrapper[4863]: I1205 07:41:08.805294 4863 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwwsn" event={"ID":"73e050bf-3a21-407e-a79b-627bbcc0ddfd","Type":"ContainerStarted","Data":"dd637cdb3cd589fab7ef5eeda506314b41f1b265d682fef8f9142cc2507849d7"} Dec 05 07:41:08 crc kubenswrapper[4863]: I1205 07:41:08.830955 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nwwsn" podStartSLOduration=2.357190502 podStartE2EDuration="4.830930589s" podCreationTimestamp="2025-12-05 07:41:04 +0000 UTC" firstStartedPulling="2025-12-05 07:41:05.767766632 +0000 UTC m=+3293.493763712" lastFinishedPulling="2025-12-05 07:41:08.241506749 +0000 UTC m=+3295.967503799" observedRunningTime="2025-12-05 07:41:08.827573506 +0000 UTC m=+3296.553570556" watchObservedRunningTime="2025-12-05 07:41:08.830930589 +0000 UTC m=+3296.556927629" Dec 05 07:41:14 crc kubenswrapper[4863]: I1205 07:41:14.581723 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:14 crc kubenswrapper[4863]: I1205 07:41:14.582341 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:14 crc kubenswrapper[4863]: I1205 07:41:14.637794 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:14 crc kubenswrapper[4863]: I1205 07:41:14.916081 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:14 crc kubenswrapper[4863]: I1205 07:41:14.976593 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nwwsn"] Dec 05 07:41:16 crc kubenswrapper[4863]: I1205 07:41:16.871186 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nwwsn" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="registry-server" containerID="cri-o://dd637cdb3cd589fab7ef5eeda506314b41f1b265d682fef8f9142cc2507849d7" gracePeriod=2 Dec 05 07:41:17 crc kubenswrapper[4863]: I1205 07:41:17.879800 4863 generic.go:334] "Generic (PLEG): container finished" podID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerID="dd637cdb3cd589fab7ef5eeda506314b41f1b265d682fef8f9142cc2507849d7" exitCode=0 Dec 05 07:41:17 crc kubenswrapper[4863]: I1205 07:41:17.880147 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwwsn" event={"ID":"73e050bf-3a21-407e-a79b-627bbcc0ddfd","Type":"ContainerDied","Data":"dd637cdb3cd589fab7ef5eeda506314b41f1b265d682fef8f9142cc2507849d7"} Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.398776 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.479641 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-utilities\") pod \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.479945 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lw2m\" (UniqueName: \"kubernetes.io/projected/73e050bf-3a21-407e-a79b-627bbcc0ddfd-kube-api-access-2lw2m\") pod \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.479990 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-catalog-content\") pod \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\" (UID: \"73e050bf-3a21-407e-a79b-627bbcc0ddfd\") " Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.481634 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-utilities" (OuterVolumeSpecName: "utilities") pod "73e050bf-3a21-407e-a79b-627bbcc0ddfd" (UID: "73e050bf-3a21-407e-a79b-627bbcc0ddfd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.490582 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73e050bf-3a21-407e-a79b-627bbcc0ddfd-kube-api-access-2lw2m" (OuterVolumeSpecName: "kube-api-access-2lw2m") pod "73e050bf-3a21-407e-a79b-627bbcc0ddfd" (UID: "73e050bf-3a21-407e-a79b-627bbcc0ddfd"). InnerVolumeSpecName "kube-api-access-2lw2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.533229 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "73e050bf-3a21-407e-a79b-627bbcc0ddfd" (UID: "73e050bf-3a21-407e-a79b-627bbcc0ddfd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.582119 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.582400 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lw2m\" (UniqueName: \"kubernetes.io/projected/73e050bf-3a21-407e-a79b-627bbcc0ddfd-kube-api-access-2lw2m\") on node \"crc\" DevicePath \"\"" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.582608 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e050bf-3a21-407e-a79b-627bbcc0ddfd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.896673 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwwsn" event={"ID":"73e050bf-3a21-407e-a79b-627bbcc0ddfd","Type":"ContainerDied","Data":"47feae6984d2173720337892f7b59cd8a6f3d6f8a6b1d4c21ccc7f9959496185"} Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.896793 4863 scope.go:117] "RemoveContainer" containerID="dd637cdb3cd589fab7ef5eeda506314b41f1b265d682fef8f9142cc2507849d7" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.896992 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwwsn" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.933661 4863 scope.go:117] "RemoveContainer" containerID="bb3360c9fc6f1015c352644cdf1d11dbb6fc203a6278973401364979c68c1a01" Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.935411 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nwwsn"] Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.944904 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nwwsn"] Dec 05 07:41:18 crc kubenswrapper[4863]: I1205 07:41:18.966280 4863 scope.go:117] "RemoveContainer" containerID="a54b919aafe49a494b9d07e25faee0fb85b99f0a1d934c29e3d56bfc22aebdd7" Dec 05 07:41:20 crc kubenswrapper[4863]: I1205 07:41:20.617559 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" path="/var/lib/kubelet/pods/73e050bf-3a21-407e-a79b-627bbcc0ddfd/volumes" Dec 05 07:42:38 crc kubenswrapper[4863]: I1205 07:42:38.464992 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:42:38 crc kubenswrapper[4863]: I1205 07:42:38.465710 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:43:08 crc kubenswrapper[4863]: I1205 07:43:08.464418 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:43:08 crc kubenswrapper[4863]: I1205 07:43:08.465864 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:43:38 crc kubenswrapper[4863]: I1205 07:43:38.464558 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:43:38 crc kubenswrapper[4863]: I1205 07:43:38.465193 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:43:38 crc kubenswrapper[4863]: I1205 07:43:38.465262 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:43:38 crc kubenswrapper[4863]: I1205 07:43:38.466321 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c3e01ec6c90887a3b71c4ddcb09cb71ffd0cfa6bbd5df2df86909ba7bb3fcba3"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:43:38 crc kubenswrapper[4863]: I1205 07:43:38.466414 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://c3e01ec6c90887a3b71c4ddcb09cb71ffd0cfa6bbd5df2df86909ba7bb3fcba3" gracePeriod=600 Dec 05 07:43:39 crc kubenswrapper[4863]: I1205 07:43:39.225867 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="c3e01ec6c90887a3b71c4ddcb09cb71ffd0cfa6bbd5df2df86909ba7bb3fcba3" exitCode=0 Dec 05 07:43:39 crc kubenswrapper[4863]: I1205 07:43:39.225921 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"c3e01ec6c90887a3b71c4ddcb09cb71ffd0cfa6bbd5df2df86909ba7bb3fcba3"} Dec 05 07:43:39 crc kubenswrapper[4863]: I1205 07:43:39.226230 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829"} Dec 05 07:43:39 crc kubenswrapper[4863]: I1205 07:43:39.226256 4863 scope.go:117] "RemoveContainer" containerID="6808eb81f4d57b31be029d7dd49d32e0363765d4382bfbc0f0b38cae2b09e360" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.147843 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd"] Dec 05 
07:45:00 crc kubenswrapper[4863]: E1205 07:45:00.148707 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="extract-content" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.148720 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="extract-content" Dec 05 07:45:00 crc kubenswrapper[4863]: E1205 07:45:00.148735 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="registry-server" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.148742 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="registry-server" Dec 05 07:45:00 crc kubenswrapper[4863]: E1205 07:45:00.148756 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="extract-utilities" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.148763 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="extract-utilities" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.148902 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="73e050bf-3a21-407e-a79b-627bbcc0ddfd" containerName="registry-server" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.149414 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.151370 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.151615 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.158395 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd"] Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.272262 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5bc1e486-4f3a-49e3-bca3-01cf38552df9-config-volume\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.272582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5bc1e486-4f3a-49e3-bca3-01cf38552df9-secret-volume\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.272701 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fpn9\" (UniqueName: \"kubernetes.io/projected/5bc1e486-4f3a-49e3-bca3-01cf38552df9-kube-api-access-5fpn9\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc 
kubenswrapper[4863]: I1205 07:45:00.374639 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5bc1e486-4f3a-49e3-bca3-01cf38552df9-secret-volume\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.374958 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fpn9\" (UniqueName: \"kubernetes.io/projected/5bc1e486-4f3a-49e3-bca3-01cf38552df9-kube-api-access-5fpn9\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.375117 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5bc1e486-4f3a-49e3-bca3-01cf38552df9-config-volume\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.376102 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5bc1e486-4f3a-49e3-bca3-01cf38552df9-config-volume\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.380658 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5bc1e486-4f3a-49e3-bca3-01cf38552df9-secret-volume\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.390074 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fpn9\" (UniqueName: \"kubernetes.io/projected/5bc1e486-4f3a-49e3-bca3-01cf38552df9-kube-api-access-5fpn9\") pod \"collect-profiles-29415345-c6hwd\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.475637 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:00 crc kubenswrapper[4863]: I1205 07:45:00.899914 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd"] Dec 05 07:45:01 crc kubenswrapper[4863]: I1205 07:45:01.076429 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" event={"ID":"5bc1e486-4f3a-49e3-bca3-01cf38552df9","Type":"ContainerStarted","Data":"db3a1b8262e7ee1cd5b9503d0bdd8e0ced83df5ad3a16f22301b1186b0700e37"} Dec 05 07:45:01 crc kubenswrapper[4863]: I1205 07:45:01.076807 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" event={"ID":"5bc1e486-4f3a-49e3-bca3-01cf38552df9","Type":"ContainerStarted","Data":"964dad090b25f69039a727a7a9056eb66046e0cce8387809743854139df67d73"} Dec 05 07:45:01 crc kubenswrapper[4863]: I1205 07:45:01.091935 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" podStartSLOduration=1.091914447 podStartE2EDuration="1.091914447s" podCreationTimestamp="2025-12-05 07:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 07:45:01.091787393 +0000 UTC m=+3528.817784423" watchObservedRunningTime="2025-12-05 07:45:01.091914447 +0000 UTC m=+3528.817911487" Dec 05 07:45:02 crc kubenswrapper[4863]: I1205 07:45:02.085594 4863 generic.go:334] "Generic (PLEG): container finished" podID="5bc1e486-4f3a-49e3-bca3-01cf38552df9" containerID="db3a1b8262e7ee1cd5b9503d0bdd8e0ced83df5ad3a16f22301b1186b0700e37" exitCode=0 Dec 05 07:45:02 crc kubenswrapper[4863]: I1205 07:45:02.085646 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" event={"ID":"5bc1e486-4f3a-49e3-bca3-01cf38552df9","Type":"ContainerDied","Data":"db3a1b8262e7ee1cd5b9503d0bdd8e0ced83df5ad3a16f22301b1186b0700e37"} Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.395629 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.523328 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5bc1e486-4f3a-49e3-bca3-01cf38552df9-secret-volume\") pod \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.523434 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fpn9\" (UniqueName: \"kubernetes.io/projected/5bc1e486-4f3a-49e3-bca3-01cf38552df9-kube-api-access-5fpn9\") pod \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.523511 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5bc1e486-4f3a-49e3-bca3-01cf38552df9-config-volume\") pod \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\" (UID: \"5bc1e486-4f3a-49e3-bca3-01cf38552df9\") " Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.523981 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bc1e486-4f3a-49e3-bca3-01cf38552df9-config-volume" (OuterVolumeSpecName: "config-volume") pod "5bc1e486-4f3a-49e3-bca3-01cf38552df9" (UID: "5bc1e486-4f3a-49e3-bca3-01cf38552df9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.531978 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bc1e486-4f3a-49e3-bca3-01cf38552df9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5bc1e486-4f3a-49e3-bca3-01cf38552df9" (UID: "5bc1e486-4f3a-49e3-bca3-01cf38552df9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.533221 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bc1e486-4f3a-49e3-bca3-01cf38552df9-kube-api-access-5fpn9" (OuterVolumeSpecName: "kube-api-access-5fpn9") pod "5bc1e486-4f3a-49e3-bca3-01cf38552df9" (UID: "5bc1e486-4f3a-49e3-bca3-01cf38552df9"). InnerVolumeSpecName "kube-api-access-5fpn9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.625394 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5bc1e486-4f3a-49e3-bca3-01cf38552df9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.625442 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fpn9\" (UniqueName: \"kubernetes.io/projected/5bc1e486-4f3a-49e3-bca3-01cf38552df9-kube-api-access-5fpn9\") on node \"crc\" DevicePath \"\"" Dec 05 07:45:03 crc kubenswrapper[4863]: I1205 07:45:03.625460 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5bc1e486-4f3a-49e3-bca3-01cf38552df9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:45:04 crc kubenswrapper[4863]: I1205 07:45:04.112598 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" event={"ID":"5bc1e486-4f3a-49e3-bca3-01cf38552df9","Type":"ContainerDied","Data":"964dad090b25f69039a727a7a9056eb66046e0cce8387809743854139df67d73"} Dec 05 07:45:04 crc kubenswrapper[4863]: I1205 07:45:04.112675 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="964dad090b25f69039a727a7a9056eb66046e0cce8387809743854139df67d73" Dec 05 07:45:04 crc kubenswrapper[4863]: I1205 07:45:04.112776 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd" Dec 05 07:45:04 crc kubenswrapper[4863]: I1205 07:45:04.502880 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh"] Dec 05 07:45:04 crc kubenswrapper[4863]: I1205 07:45:04.510281 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-b4tdh"] Dec 05 07:45:04 crc kubenswrapper[4863]: I1205 07:45:04.611257 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83505046-0a0e-45a7-9b31-ba9854f03e00" path="/var/lib/kubelet/pods/83505046-0a0e-45a7-9b31-ba9854f03e00/volumes" Dec 05 07:45:25 crc kubenswrapper[4863]: I1205 07:45:25.567828 4863 scope.go:117] "RemoveContainer" containerID="1165bf6fe410c69e74b802bdee134934a25c0e41f0c35d72251eda196de0df35" Dec 05 07:45:38 crc kubenswrapper[4863]: I1205 07:45:38.464552 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:45:38 crc kubenswrapper[4863]: I1205 07:45:38.465271 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:46:08 crc kubenswrapper[4863]: I1205 07:46:08.464509 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 05 07:46:08 crc kubenswrapper[4863]: I1205 07:46:08.465754 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:46:38 crc kubenswrapper[4863]: I1205 07:46:38.464436 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:46:38 crc kubenswrapper[4863]: I1205 07:46:38.465071 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:46:38 crc kubenswrapper[4863]: I1205 07:46:38.465156 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:46:38 crc kubenswrapper[4863]: I1205 07:46:38.465898 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:46:38 crc kubenswrapper[4863]: I1205 07:46:38.465969 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" gracePeriod=600 Dec 05 07:46:38 crc kubenswrapper[4863]: E1205 07:46:38.600183 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:46:39 crc kubenswrapper[4863]: I1205 07:46:39.216935 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" exitCode=0 Dec 05 07:46:39 crc kubenswrapper[4863]: I1205 07:46:39.217029 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829"} Dec 05 07:46:39 crc kubenswrapper[4863]: I1205 07:46:39.217314 4863 scope.go:117] "RemoveContainer" containerID="c3e01ec6c90887a3b71c4ddcb09cb71ffd0cfa6bbd5df2df86909ba7bb3fcba3" Dec 05 07:46:39 crc kubenswrapper[4863]: I1205 07:46:39.218177 4863 scope.go:117] "RemoveContainer" 
containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:46:39 crc kubenswrapper[4863]: E1205 07:46:39.218852 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:46:49 crc kubenswrapper[4863]: I1205 07:46:49.601710 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:46:49 crc kubenswrapper[4863]: E1205 07:46:49.602733 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:47:03 crc kubenswrapper[4863]: I1205 07:47:03.602203 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:47:03 crc kubenswrapper[4863]: E1205 07:47:03.603422 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:47:14 crc kubenswrapper[4863]: I1205 07:47:14.603140 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:47:14 crc kubenswrapper[4863]: E1205 07:47:14.605402 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:47:27 crc kubenswrapper[4863]: I1205 07:47:27.602564 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:47:27 crc kubenswrapper[4863]: E1205 07:47:27.603584 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:47:40 crc kubenswrapper[4863]: I1205 07:47:40.602643 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:47:40 crc kubenswrapper[4863]: E1205 07:47:40.603639 4863 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:47:53 crc kubenswrapper[4863]: I1205 07:47:53.602792 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:47:53 crc kubenswrapper[4863]: E1205 07:47:53.603875 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:48:05 crc kubenswrapper[4863]: I1205 07:48:05.602352 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:48:05 crc kubenswrapper[4863]: E1205 07:48:05.603222 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:48:16 crc kubenswrapper[4863]: I1205 07:48:16.602258 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:48:16 crc kubenswrapper[4863]: E1205 07:48:16.603289 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.181344 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s2j4w"] Dec 05 07:48:21 crc kubenswrapper[4863]: E1205 07:48:21.182682 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bc1e486-4f3a-49e3-bca3-01cf38552df9" containerName="collect-profiles" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.182716 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bc1e486-4f3a-49e3-bca3-01cf38552df9" containerName="collect-profiles" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.183102 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bc1e486-4f3a-49e3-bca3-01cf38552df9" containerName="collect-profiles" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.185691 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.210800 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s2j4w"] Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.341135 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-utilities\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.341207 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkj57\" (UniqueName: \"kubernetes.io/projected/61a78a10-8d75-454d-8d62-83481dc91544-kube-api-access-jkj57\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.341269 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-catalog-content\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.442380 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-utilities\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.442442 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkj57\" (UniqueName: \"kubernetes.io/projected/61a78a10-8d75-454d-8d62-83481dc91544-kube-api-access-jkj57\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.442482 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-catalog-content\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.443055 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-catalog-content\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.443053 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-utilities\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.466001 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jkj57\" (UniqueName: \"kubernetes.io/projected/61a78a10-8d75-454d-8d62-83481dc91544-kube-api-access-jkj57\") pod \"redhat-operators-s2j4w\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.541048 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:21 crc kubenswrapper[4863]: I1205 07:48:21.996786 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s2j4w"] Dec 05 07:48:22 crc kubenswrapper[4863]: I1205 07:48:22.180355 4863 generic.go:334] "Generic (PLEG): container finished" podID="61a78a10-8d75-454d-8d62-83481dc91544" containerID="c00411d90dd0cfb6d68e34b60d9e8fc06829aa97458f6603695e2098c1813db8" exitCode=0 Dec 05 07:48:22 crc kubenswrapper[4863]: I1205 07:48:22.180395 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2j4w" event={"ID":"61a78a10-8d75-454d-8d62-83481dc91544","Type":"ContainerDied","Data":"c00411d90dd0cfb6d68e34b60d9e8fc06829aa97458f6603695e2098c1813db8"} Dec 05 07:48:22 crc kubenswrapper[4863]: I1205 07:48:22.180420 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2j4w" event={"ID":"61a78a10-8d75-454d-8d62-83481dc91544","Type":"ContainerStarted","Data":"d2a4501905a82fdf486ac1288cd8900cda8fe661ff39e79a92851519dd2ba073"} Dec 05 07:48:22 crc kubenswrapper[4863]: I1205 07:48:22.182073 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:48:23 crc kubenswrapper[4863]: I1205 07:48:23.188996 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2j4w" event={"ID":"61a78a10-8d75-454d-8d62-83481dc91544","Type":"ContainerStarted","Data":"86146defddd47f24e9f40db330540afc34d1806c97a4706f53c2ba4640191074"} Dec 05 07:48:24 crc kubenswrapper[4863]: I1205 07:48:24.200899 4863 generic.go:334] "Generic (PLEG): container finished" podID="61a78a10-8d75-454d-8d62-83481dc91544" containerID="86146defddd47f24e9f40db330540afc34d1806c97a4706f53c2ba4640191074" exitCode=0 Dec 05 07:48:24 crc kubenswrapper[4863]: I1205 07:48:24.200979 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2j4w" event={"ID":"61a78a10-8d75-454d-8d62-83481dc91544","Type":"ContainerDied","Data":"86146defddd47f24e9f40db330540afc34d1806c97a4706f53c2ba4640191074"} Dec 05 07:48:25 crc kubenswrapper[4863]: I1205 07:48:25.212461 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2j4w" event={"ID":"61a78a10-8d75-454d-8d62-83481dc91544","Type":"ContainerStarted","Data":"c21a44bcebfb92ac6d9de3e1c75bd128dd50d728e31952f317a1e979896cbc5b"} Dec 05 07:48:25 crc kubenswrapper[4863]: I1205 07:48:25.241289 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s2j4w" podStartSLOduration=1.6707773110000002 podStartE2EDuration="4.241265574s" podCreationTimestamp="2025-12-05 07:48:21 +0000 UTC" firstStartedPulling="2025-12-05 07:48:22.181849696 +0000 UTC m=+3729.907846726" lastFinishedPulling="2025-12-05 07:48:24.752337909 +0000 UTC m=+3732.478334989" observedRunningTime="2025-12-05 07:48:25.239100691 +0000 UTC m=+3732.965097771" watchObservedRunningTime="2025-12-05 07:48:25.241265574 +0000 UTC m=+3732.967262614" Dec 05 07:48:28 crc 
kubenswrapper[4863]: I1205 07:48:28.602526 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:48:28 crc kubenswrapper[4863]: E1205 07:48:28.603575 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:48:31 crc kubenswrapper[4863]: I1205 07:48:31.541955 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:31 crc kubenswrapper[4863]: I1205 07:48:31.542006 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:31 crc kubenswrapper[4863]: I1205 07:48:31.619987 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:32 crc kubenswrapper[4863]: I1205 07:48:32.333418 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:32 crc kubenswrapper[4863]: I1205 07:48:32.404665 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s2j4w"] Dec 05 07:48:34 crc kubenswrapper[4863]: I1205 07:48:34.284677 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s2j4w" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="registry-server" containerID="cri-o://c21a44bcebfb92ac6d9de3e1c75bd128dd50d728e31952f317a1e979896cbc5b" gracePeriod=2 Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.298088 4863 generic.go:334] "Generic (PLEG): container finished" podID="61a78a10-8d75-454d-8d62-83481dc91544" containerID="c21a44bcebfb92ac6d9de3e1c75bd128dd50d728e31952f317a1e979896cbc5b" exitCode=0 Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.298149 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2j4w" event={"ID":"61a78a10-8d75-454d-8d62-83481dc91544","Type":"ContainerDied","Data":"c21a44bcebfb92ac6d9de3e1c75bd128dd50d728e31952f317a1e979896cbc5b"} Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.868203 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.990851 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-utilities\") pod \"61a78a10-8d75-454d-8d62-83481dc91544\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.990922 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkj57\" (UniqueName: \"kubernetes.io/projected/61a78a10-8d75-454d-8d62-83481dc91544-kube-api-access-jkj57\") pod \"61a78a10-8d75-454d-8d62-83481dc91544\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.991024 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-catalog-content\") pod \"61a78a10-8d75-454d-8d62-83481dc91544\" (UID: \"61a78a10-8d75-454d-8d62-83481dc91544\") " Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.992382 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-utilities" (OuterVolumeSpecName: "utilities") pod "61a78a10-8d75-454d-8d62-83481dc91544" (UID: "61a78a10-8d75-454d-8d62-83481dc91544"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:48:35 crc kubenswrapper[4863]: I1205 07:48:35.996825 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61a78a10-8d75-454d-8d62-83481dc91544-kube-api-access-jkj57" (OuterVolumeSpecName: "kube-api-access-jkj57") pod "61a78a10-8d75-454d-8d62-83481dc91544" (UID: "61a78a10-8d75-454d-8d62-83481dc91544"). InnerVolumeSpecName "kube-api-access-jkj57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.093046 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.093075 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkj57\" (UniqueName: \"kubernetes.io/projected/61a78a10-8d75-454d-8d62-83481dc91544-kube-api-access-jkj57\") on node \"crc\" DevicePath \"\"" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.114742 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61a78a10-8d75-454d-8d62-83481dc91544" (UID: "61a78a10-8d75-454d-8d62-83481dc91544"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.193919 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a78a10-8d75-454d-8d62-83481dc91544-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.314642 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2j4w" event={"ID":"61a78a10-8d75-454d-8d62-83481dc91544","Type":"ContainerDied","Data":"d2a4501905a82fdf486ac1288cd8900cda8fe661ff39e79a92851519dd2ba073"} Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.314723 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s2j4w" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.314731 4863 scope.go:117] "RemoveContainer" containerID="c21a44bcebfb92ac6d9de3e1c75bd128dd50d728e31952f317a1e979896cbc5b" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.359362 4863 scope.go:117] "RemoveContainer" containerID="86146defddd47f24e9f40db330540afc34d1806c97a4706f53c2ba4640191074" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.371844 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s2j4w"] Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.381888 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s2j4w"] Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.396385 4863 scope.go:117] "RemoveContainer" containerID="c00411d90dd0cfb6d68e34b60d9e8fc06829aa97458f6603695e2098c1813db8" Dec 05 07:48:36 crc kubenswrapper[4863]: I1205 07:48:36.622101 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61a78a10-8d75-454d-8d62-83481dc91544" path="/var/lib/kubelet/pods/61a78a10-8d75-454d-8d62-83481dc91544/volumes" Dec 05 07:48:43 crc kubenswrapper[4863]: I1205 07:48:43.601914 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:48:43 crc kubenswrapper[4863]: E1205 07:48:43.603404 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:48:55 crc kubenswrapper[4863]: I1205 07:48:55.602835 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:48:55 crc kubenswrapper[4863]: E1205 07:48:55.604220 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:49:09 crc kubenswrapper[4863]: I1205 07:49:09.602403 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:49:09 crc kubenswrapper[4863]: E1205 07:49:09.603824 
4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:49:24 crc kubenswrapper[4863]: I1205 07:49:24.602028 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:49:24 crc kubenswrapper[4863]: E1205 07:49:24.602781 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:49:37 crc kubenswrapper[4863]: I1205 07:49:37.602572 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:49:37 crc kubenswrapper[4863]: E1205 07:49:37.603343 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:49:51 crc kubenswrapper[4863]: I1205 07:49:51.602523 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:49:51 crc kubenswrapper[4863]: E1205 07:49:51.603761 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:50:06 crc kubenswrapper[4863]: I1205 07:50:06.602779 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:50:06 crc kubenswrapper[4863]: E1205 07:50:06.603875 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:50:21 crc kubenswrapper[4863]: I1205 07:50:21.602944 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:50:21 crc kubenswrapper[4863]: E1205 07:50:21.603760 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:50:33 crc kubenswrapper[4863]: I1205 07:50:33.602407 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:50:33 crc kubenswrapper[4863]: E1205 07:50:33.604998 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:50:48 crc kubenswrapper[4863]: I1205 07:50:48.602328 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:50:48 crc kubenswrapper[4863]: E1205 07:50:48.603301 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:51:03 crc kubenswrapper[4863]: I1205 07:51:03.601843 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:51:03 crc kubenswrapper[4863]: E1205 07:51:03.603092 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.040219 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jvfkt"] Dec 05 07:51:10 crc kubenswrapper[4863]: E1205 07:51:10.043434 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="extract-content" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.043502 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="extract-content" Dec 05 07:51:10 crc kubenswrapper[4863]: E1205 07:51:10.043548 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="extract-utilities" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.043570 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="extract-utilities" Dec 05 07:51:10 crc kubenswrapper[4863]: E1205 07:51:10.043626 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="registry-server" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.043646 4863 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="registry-server" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.044010 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="61a78a10-8d75-454d-8d62-83481dc91544" containerName="registry-server" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.046179 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.065548 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jvfkt"] Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.122763 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-catalog-content\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.122834 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-utilities\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.122876 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc57s\" (UniqueName: \"kubernetes.io/projected/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-kube-api-access-wc57s\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.224103 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-catalog-content\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.224189 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-utilities\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.224252 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc57s\" (UniqueName: \"kubernetes.io/projected/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-kube-api-access-wc57s\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.224696 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-catalog-content\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.224774 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-utilities\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.280260 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc57s\" (UniqueName: \"kubernetes.io/projected/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-kube-api-access-wc57s\") pod \"certified-operators-jvfkt\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.370588 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:10 crc kubenswrapper[4863]: I1205 07:51:10.755215 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jvfkt"] Dec 05 07:51:11 crc kubenswrapper[4863]: I1205 07:51:11.713811 4863 generic.go:334] "Generic (PLEG): container finished" podID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerID="7d18e9d2491bd4f1474898451ea5d7761097f2e7ff151e5466ffdc5a0effa82a" exitCode=0 Dec 05 07:51:11 crc kubenswrapper[4863]: I1205 07:51:11.713921 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvfkt" event={"ID":"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20","Type":"ContainerDied","Data":"7d18e9d2491bd4f1474898451ea5d7761097f2e7ff151e5466ffdc5a0effa82a"} Dec 05 07:51:11 crc kubenswrapper[4863]: I1205 07:51:11.714247 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvfkt" event={"ID":"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20","Type":"ContainerStarted","Data":"6433549752aad8d91ff388a3b2d2198cfff1a3e25eff2db19737915e86176682"} Dec 05 07:51:12 crc kubenswrapper[4863]: I1205 07:51:12.728080 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvfkt" event={"ID":"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20","Type":"ContainerStarted","Data":"04953cad214a348d91e8d13587bedb6071d0e24767de55695ba2ba39f969cd15"} Dec 05 07:51:13 crc kubenswrapper[4863]: I1205 07:51:13.741110 4863 generic.go:334] "Generic (PLEG): container finished" podID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerID="04953cad214a348d91e8d13587bedb6071d0e24767de55695ba2ba39f969cd15" exitCode=0 Dec 05 07:51:13 crc kubenswrapper[4863]: I1205 07:51:13.741196 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvfkt" event={"ID":"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20","Type":"ContainerDied","Data":"04953cad214a348d91e8d13587bedb6071d0e24767de55695ba2ba39f969cd15"} Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.415897 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-znpx7"] Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.418605 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.444386 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-znpx7"] Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.491880 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-492l5\" (UniqueName: \"kubernetes.io/projected/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-kube-api-access-492l5\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.491966 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-utilities\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.492060 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-catalog-content\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.592961 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-492l5\" (UniqueName: \"kubernetes.io/projected/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-kube-api-access-492l5\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.593024 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-utilities\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.593091 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-catalog-content\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.593692 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-catalog-content\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.593769 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-utilities\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.623859 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-492l5\" (UniqueName: \"kubernetes.io/projected/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-kube-api-access-492l5\") pod \"redhat-marketplace-znpx7\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.734996 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.748795 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvfkt" event={"ID":"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20","Type":"ContainerStarted","Data":"741d58655a2fe37f849ad9426983262fcd33918cf6f7415287773a96a6ea8924"} Dec 05 07:51:14 crc kubenswrapper[4863]: I1205 07:51:14.783226 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jvfkt" podStartSLOduration=2.278221226 podStartE2EDuration="4.7832069s" podCreationTimestamp="2025-12-05 07:51:10 +0000 UTC" firstStartedPulling="2025-12-05 07:51:11.71719584 +0000 UTC m=+3899.443192920" lastFinishedPulling="2025-12-05 07:51:14.222181514 +0000 UTC m=+3901.948178594" observedRunningTime="2025-12-05 07:51:14.778836194 +0000 UTC m=+3902.504833234" watchObservedRunningTime="2025-12-05 07:51:14.7832069 +0000 UTC m=+3902.509203960" Dec 05 07:51:15 crc kubenswrapper[4863]: I1205 07:51:15.244860 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-znpx7"] Dec 05 07:51:15 crc kubenswrapper[4863]: I1205 07:51:15.760826 4863 generic.go:334] "Generic (PLEG): container finished" podID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerID="9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3" exitCode=0 Dec 05 07:51:15 crc kubenswrapper[4863]: I1205 07:51:15.761076 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-znpx7" event={"ID":"4230a661-77c4-40cc-bc13-ab4a1f0cc27b","Type":"ContainerDied","Data":"9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3"} Dec 05 07:51:15 crc kubenswrapper[4863]: I1205 07:51:15.762658 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-znpx7" event={"ID":"4230a661-77c4-40cc-bc13-ab4a1f0cc27b","Type":"ContainerStarted","Data":"a356187e40ccea5480ee4b4828cd0efbda0822048a90492732e45713e92edc27"} Dec 05 07:51:17 crc kubenswrapper[4863]: I1205 07:51:17.601891 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:51:17 crc kubenswrapper[4863]: E1205 07:51:17.602407 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:51:17 crc kubenswrapper[4863]: I1205 07:51:17.789755 4863 generic.go:334] "Generic (PLEG): container finished" podID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerID="371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14" exitCode=0 Dec 05 07:51:17 crc kubenswrapper[4863]: I1205 07:51:17.789812 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-znpx7" event={"ID":"4230a661-77c4-40cc-bc13-ab4a1f0cc27b","Type":"ContainerDied","Data":"371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14"} Dec 05 07:51:18 crc kubenswrapper[4863]: I1205 07:51:18.813162 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-znpx7" event={"ID":"4230a661-77c4-40cc-bc13-ab4a1f0cc27b","Type":"ContainerStarted","Data":"67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df"} Dec 05 07:51:18 crc kubenswrapper[4863]: I1205 07:51:18.852755 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-znpx7" podStartSLOduration=2.235048635 podStartE2EDuration="4.852728779s" podCreationTimestamp="2025-12-05 07:51:14 +0000 UTC" firstStartedPulling="2025-12-05 07:51:15.762948269 +0000 UTC m=+3903.488945349" lastFinishedPulling="2025-12-05 07:51:18.380628403 +0000 UTC m=+3906.106625493" observedRunningTime="2025-12-05 07:51:18.843847832 +0000 UTC m=+3906.569844912" watchObservedRunningTime="2025-12-05 07:51:18.852728779 +0000 UTC m=+3906.578725859" Dec 05 07:51:20 crc kubenswrapper[4863]: I1205 07:51:20.371821 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:20 crc kubenswrapper[4863]: I1205 07:51:20.371887 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:20 crc kubenswrapper[4863]: I1205 07:51:20.427437 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:20 crc kubenswrapper[4863]: I1205 07:51:20.886119 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:21 crc kubenswrapper[4863]: I1205 07:51:21.601952 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jvfkt"] Dec 05 07:51:22 crc kubenswrapper[4863]: I1205 07:51:22.853437 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jvfkt" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="registry-server" containerID="cri-o://741d58655a2fe37f849ad9426983262fcd33918cf6f7415287773a96a6ea8924" gracePeriod=2 Dec 05 07:51:23 crc kubenswrapper[4863]: I1205 07:51:23.866572 4863 generic.go:334] "Generic (PLEG): container finished" podID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerID="741d58655a2fe37f849ad9426983262fcd33918cf6f7415287773a96a6ea8924" exitCode=0 Dec 05 07:51:23 crc kubenswrapper[4863]: I1205 07:51:23.866721 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvfkt" event={"ID":"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20","Type":"ContainerDied","Data":"741d58655a2fe37f849ad9426983262fcd33918cf6f7415287773a96a6ea8924"} Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.450830 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.597245 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-catalog-content\") pod \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.597396 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-utilities\") pod \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.597574 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc57s\" (UniqueName: \"kubernetes.io/projected/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-kube-api-access-wc57s\") pod \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\" (UID: \"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20\") " Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.598871 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-utilities" (OuterVolumeSpecName: "utilities") pod "dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" (UID: "dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.607613 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-kube-api-access-wc57s" (OuterVolumeSpecName: "kube-api-access-wc57s") pod "dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" (UID: "dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20"). InnerVolumeSpecName "kube-api-access-wc57s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.697301 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" (UID: "dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.702142 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc57s\" (UniqueName: \"kubernetes.io/projected/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-kube-api-access-wc57s\") on node \"crc\" DevicePath \"\"" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.702195 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.702210 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.737481 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.737573 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.819716 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.875572 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvfkt" event={"ID":"dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20","Type":"ContainerDied","Data":"6433549752aad8d91ff388a3b2d2198cfff1a3e25eff2db19737915e86176682"} Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.875609 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jvfkt" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.875627 4863 scope.go:117] "RemoveContainer" containerID="741d58655a2fe37f849ad9426983262fcd33918cf6f7415287773a96a6ea8924" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.908689 4863 scope.go:117] "RemoveContainer" containerID="04953cad214a348d91e8d13587bedb6071d0e24767de55695ba2ba39f969cd15" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.920663 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jvfkt"] Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.927417 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jvfkt"] Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.944209 4863 scope.go:117] "RemoveContainer" containerID="7d18e9d2491bd4f1474898451ea5d7761097f2e7ff151e5466ffdc5a0effa82a" Dec 05 07:51:24 crc kubenswrapper[4863]: I1205 07:51:24.953819 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:26 crc kubenswrapper[4863]: I1205 07:51:26.609913 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" path="/var/lib/kubelet/pods/dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20/volumes" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.204351 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-znpx7"] Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.204723 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-znpx7" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="registry-server" containerID="cri-o://67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df" gracePeriod=2 Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.643006 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.743332 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-utilities\") pod \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.743406 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-catalog-content\") pod \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.743513 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-492l5\" (UniqueName: \"kubernetes.io/projected/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-kube-api-access-492l5\") pod \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\" (UID: \"4230a661-77c4-40cc-bc13-ab4a1f0cc27b\") " Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.744150 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-utilities" (OuterVolumeSpecName: "utilities") pod "4230a661-77c4-40cc-bc13-ab4a1f0cc27b" (UID: "4230a661-77c4-40cc-bc13-ab4a1f0cc27b"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.757359 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-kube-api-access-492l5" (OuterVolumeSpecName: "kube-api-access-492l5") pod "4230a661-77c4-40cc-bc13-ab4a1f0cc27b" (UID: "4230a661-77c4-40cc-bc13-ab4a1f0cc27b"). InnerVolumeSpecName "kube-api-access-492l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.790084 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4230a661-77c4-40cc-bc13-ab4a1f0cc27b" (UID: "4230a661-77c4-40cc-bc13-ab4a1f0cc27b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.845200 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.845234 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.845269 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-492l5\" (UniqueName: \"kubernetes.io/projected/4230a661-77c4-40cc-bc13-ab4a1f0cc27b-kube-api-access-492l5\") on node \"crc\" DevicePath \"\"" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.900028 4863 generic.go:334] "Generic (PLEG): container finished" podID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerID="67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df" exitCode=0 Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.900093 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-znpx7" event={"ID":"4230a661-77c4-40cc-bc13-ab4a1f0cc27b","Type":"ContainerDied","Data":"67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df"} Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.900125 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-znpx7" event={"ID":"4230a661-77c4-40cc-bc13-ab4a1f0cc27b","Type":"ContainerDied","Data":"a356187e40ccea5480ee4b4828cd0efbda0822048a90492732e45713e92edc27"} Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.900145 4863 scope.go:117] "RemoveContainer" containerID="67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.900320 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-znpx7" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.930194 4863 scope.go:117] "RemoveContainer" containerID="371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.953517 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-znpx7"] Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.959884 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-znpx7"] Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.970228 4863 scope.go:117] "RemoveContainer" containerID="9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.996856 4863 scope.go:117] "RemoveContainer" containerID="67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df" Dec 05 07:51:27 crc kubenswrapper[4863]: E1205 07:51:27.997436 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df\": container with ID starting with 67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df not found: ID does not exist" containerID="67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.997549 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df"} err="failed to get container status \"67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df\": rpc error: code = NotFound desc = could not find container \"67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df\": container with ID starting with 67148cf42fb3edc1c0f180443cc0267616e73226434b73d5ed7129b2eff061df not found: ID does not exist" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.997587 4863 scope.go:117] "RemoveContainer" containerID="371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14" Dec 05 07:51:27 crc kubenswrapper[4863]: E1205 07:51:27.997919 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14\": container with ID starting with 371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14 not found: ID does not exist" containerID="371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.997962 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14"} err="failed to get container status \"371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14\": rpc error: code = NotFound desc = could not find container \"371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14\": container with ID starting with 371c5889a36ed0eed3105bd22c4290b4c6497d9a3f46c32dd018f01abb4e7e14 not found: ID does not exist" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.997989 4863 scope.go:117] "RemoveContainer" containerID="9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3" Dec 05 07:51:27 crc kubenswrapper[4863]: E1205 07:51:27.998404 4863 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3\": container with ID starting with 9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3 not found: ID does not exist" containerID="9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3" Dec 05 07:51:27 crc kubenswrapper[4863]: I1205 07:51:27.998426 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3"} err="failed to get container status \"9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3\": rpc error: code = NotFound desc = could not find container \"9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3\": container with ID starting with 9189d0a4252ee2291f80ab768cbf3f9b4591f11a1393610721f5df7383801ec3 not found: ID does not exist" Dec 05 07:51:28 crc kubenswrapper[4863]: I1205 07:51:28.619068 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" path="/var/lib/kubelet/pods/4230a661-77c4-40cc-bc13-ab4a1f0cc27b/volumes" Dec 05 07:51:30 crc kubenswrapper[4863]: I1205 07:51:30.601768 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:51:30 crc kubenswrapper[4863]: E1205 07:51:30.602107 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:51:42 crc kubenswrapper[4863]: I1205 07:51:42.612954 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:51:43 crc kubenswrapper[4863]: I1205 07:51:43.297742 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"000ba37efa1b2b1040d5951e2c590682eae9abbee9a07c24487113d756b3b930"} Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.336690 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qd4wd"] Dec 05 07:53:20 crc kubenswrapper[4863]: E1205 07:53:20.337857 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="extract-content" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.337881 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="extract-content" Dec 05 07:53:20 crc kubenswrapper[4863]: E1205 07:53:20.337914 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="extract-utilities" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.337927 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="extract-utilities" Dec 05 07:53:20 crc kubenswrapper[4863]: E1205 07:53:20.337959 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="extract-utilities" 
Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.337972 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="extract-utilities" Dec 05 07:53:20 crc kubenswrapper[4863]: E1205 07:53:20.337990 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="extract-content" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.338191 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="extract-content" Dec 05 07:53:20 crc kubenswrapper[4863]: E1205 07:53:20.338234 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="registry-server" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.338248 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="registry-server" Dec 05 07:53:20 crc kubenswrapper[4863]: E1205 07:53:20.338275 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="registry-server" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.338288 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="registry-server" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.338600 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4230a661-77c4-40cc-bc13-ab4a1f0cc27b" containerName="registry-server" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.338630 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfecf4f1-a70a-4543-b0ce-ee75a8bbcd20" containerName="registry-server" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.340654 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.364352 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qd4wd"] Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.426676 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-catalog-content\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.426801 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-utilities\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.426893 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw4hx\" (UniqueName: \"kubernetes.io/projected/2af4b2af-4511-4d0b-bd82-321c446dc8f0-kube-api-access-fw4hx\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.528896 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-catalog-content\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.529001 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-utilities\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.529060 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw4hx\" (UniqueName: \"kubernetes.io/projected/2af4b2af-4511-4d0b-bd82-321c446dc8f0-kube-api-access-fw4hx\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.529674 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-catalog-content\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.529744 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-utilities\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.565630 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fw4hx\" (UniqueName: \"kubernetes.io/projected/2af4b2af-4511-4d0b-bd82-321c446dc8f0-kube-api-access-fw4hx\") pod \"community-operators-qd4wd\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:20 crc kubenswrapper[4863]: I1205 07:53:20.669821 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:21 crc kubenswrapper[4863]: I1205 07:53:21.257231 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qd4wd"] Dec 05 07:53:22 crc kubenswrapper[4863]: I1205 07:53:22.206538 4863 generic.go:334] "Generic (PLEG): container finished" podID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerID="afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3" exitCode=0 Dec 05 07:53:22 crc kubenswrapper[4863]: I1205 07:53:22.206577 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qd4wd" event={"ID":"2af4b2af-4511-4d0b-bd82-321c446dc8f0","Type":"ContainerDied","Data":"afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3"} Dec 05 07:53:22 crc kubenswrapper[4863]: I1205 07:53:22.206604 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qd4wd" event={"ID":"2af4b2af-4511-4d0b-bd82-321c446dc8f0","Type":"ContainerStarted","Data":"7721176fec96540e0e766b13a432924396d1aeb59963fe094dec05978871201f"} Dec 05 07:53:22 crc kubenswrapper[4863]: I1205 07:53:22.209086 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:53:23 crc kubenswrapper[4863]: I1205 07:53:23.216061 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qd4wd" event={"ID":"2af4b2af-4511-4d0b-bd82-321c446dc8f0","Type":"ContainerStarted","Data":"faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe"} Dec 05 07:53:24 crc kubenswrapper[4863]: I1205 07:53:24.229743 4863 generic.go:334] "Generic (PLEG): container finished" podID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerID="faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe" exitCode=0 Dec 05 07:53:24 crc kubenswrapper[4863]: I1205 07:53:24.230158 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qd4wd" event={"ID":"2af4b2af-4511-4d0b-bd82-321c446dc8f0","Type":"ContainerDied","Data":"faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe"} Dec 05 07:53:25 crc kubenswrapper[4863]: I1205 07:53:25.237557 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qd4wd" event={"ID":"2af4b2af-4511-4d0b-bd82-321c446dc8f0","Type":"ContainerStarted","Data":"ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f"} Dec 05 07:53:25 crc kubenswrapper[4863]: I1205 07:53:25.254250 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qd4wd" podStartSLOduration=2.819056997 podStartE2EDuration="5.254230874s" podCreationTimestamp="2025-12-05 07:53:20 +0000 UTC" firstStartedPulling="2025-12-05 07:53:22.208811252 +0000 UTC m=+4029.934808292" lastFinishedPulling="2025-12-05 07:53:24.643985119 +0000 UTC m=+4032.369982169" observedRunningTime="2025-12-05 07:53:25.25159363 +0000 UTC m=+4032.977590670" watchObservedRunningTime="2025-12-05 
07:53:25.254230874 +0000 UTC m=+4032.980227924" Dec 05 07:53:30 crc kubenswrapper[4863]: I1205 07:53:30.670960 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:30 crc kubenswrapper[4863]: I1205 07:53:30.671456 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:30 crc kubenswrapper[4863]: I1205 07:53:30.728050 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:31 crc kubenswrapper[4863]: I1205 07:53:31.341191 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:31 crc kubenswrapper[4863]: I1205 07:53:31.394450 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qd4wd"] Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.303862 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qd4wd" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="registry-server" containerID="cri-o://ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f" gracePeriod=2 Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.761572 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.843725 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw4hx\" (UniqueName: \"kubernetes.io/projected/2af4b2af-4511-4d0b-bd82-321c446dc8f0-kube-api-access-fw4hx\") pod \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.843971 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-utilities\") pod \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.844451 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-catalog-content\") pod \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\" (UID: \"2af4b2af-4511-4d0b-bd82-321c446dc8f0\") " Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.844799 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-utilities" (OuterVolumeSpecName: "utilities") pod "2af4b2af-4511-4d0b-bd82-321c446dc8f0" (UID: "2af4b2af-4511-4d0b-bd82-321c446dc8f0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.845414 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.850107 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2af4b2af-4511-4d0b-bd82-321c446dc8f0-kube-api-access-fw4hx" (OuterVolumeSpecName: "kube-api-access-fw4hx") pod "2af4b2af-4511-4d0b-bd82-321c446dc8f0" (UID: "2af4b2af-4511-4d0b-bd82-321c446dc8f0"). InnerVolumeSpecName "kube-api-access-fw4hx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.942058 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2af4b2af-4511-4d0b-bd82-321c446dc8f0" (UID: "2af4b2af-4511-4d0b-bd82-321c446dc8f0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.947267 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af4b2af-4511-4d0b-bd82-321c446dc8f0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:53:33 crc kubenswrapper[4863]: I1205 07:53:33.947309 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw4hx\" (UniqueName: \"kubernetes.io/projected/2af4b2af-4511-4d0b-bd82-321c446dc8f0-kube-api-access-fw4hx\") on node \"crc\" DevicePath \"\"" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.312795 4863 generic.go:334] "Generic (PLEG): container finished" podID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerID="ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f" exitCode=0 Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.312846 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qd4wd" event={"ID":"2af4b2af-4511-4d0b-bd82-321c446dc8f0","Type":"ContainerDied","Data":"ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f"} Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.312888 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qd4wd" event={"ID":"2af4b2af-4511-4d0b-bd82-321c446dc8f0","Type":"ContainerDied","Data":"7721176fec96540e0e766b13a432924396d1aeb59963fe094dec05978871201f"} Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.312922 4863 scope.go:117] "RemoveContainer" containerID="ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.312963 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qd4wd" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.338425 4863 scope.go:117] "RemoveContainer" containerID="faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.366711 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qd4wd"] Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.371092 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qd4wd"] Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.393104 4863 scope.go:117] "RemoveContainer" containerID="afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.411194 4863 scope.go:117] "RemoveContainer" containerID="ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f" Dec 05 07:53:34 crc kubenswrapper[4863]: E1205 07:53:34.411653 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f\": container with ID starting with ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f not found: ID does not exist" containerID="ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.411688 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f"} err="failed to get container status \"ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f\": rpc error: code = NotFound desc = could not find container \"ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f\": container with ID starting with ef168f93a75bed6819971d8fb4bfbe636fb9fa78d21954ef77f747fce91c315f not found: ID does not exist" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.411739 4863 scope.go:117] "RemoveContainer" containerID="faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe" Dec 05 07:53:34 crc kubenswrapper[4863]: E1205 07:53:34.413108 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe\": container with ID starting with faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe not found: ID does not exist" containerID="faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.413173 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe"} err="failed to get container status \"faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe\": rpc error: code = NotFound desc = could not find container \"faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe\": container with ID starting with faf857f628b61dfab81ba635ee0b3eb30fa322c48d762149d9be26db872863fe not found: ID does not exist" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.413188 4863 scope.go:117] "RemoveContainer" containerID="afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3" Dec 05 07:53:34 crc kubenswrapper[4863]: E1205 07:53:34.413443 4863 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3\": container with ID starting with afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3 not found: ID does not exist" containerID="afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.413490 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3"} err="failed to get container status \"afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3\": rpc error: code = NotFound desc = could not find container \"afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3\": container with ID starting with afe2821b6ac79c498a80fae9b89d22390a462a4a87c040c1fdb8590694e3abb3 not found: ID does not exist" Dec 05 07:53:34 crc kubenswrapper[4863]: E1205 07:53:34.429000 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2af4b2af_4511_4d0b_bd82_321c446dc8f0.slice/crio-7721176fec96540e0e766b13a432924396d1aeb59963fe094dec05978871201f\": RecentStats: unable to find data in memory cache]" Dec 05 07:53:34 crc kubenswrapper[4863]: I1205 07:53:34.621708 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" path="/var/lib/kubelet/pods/2af4b2af-4511-4d0b-bd82-321c446dc8f0/volumes" Dec 05 07:54:08 crc kubenswrapper[4863]: I1205 07:54:08.463823 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:54:08 crc kubenswrapper[4863]: I1205 07:54:08.464644 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:54:38 crc kubenswrapper[4863]: I1205 07:54:38.464885 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:54:38 crc kubenswrapper[4863]: I1205 07:54:38.465789 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:55:08 crc kubenswrapper[4863]: I1205 07:55:08.464063 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:55:08 crc kubenswrapper[4863]: I1205 07:55:08.465751 4863 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:55:08 crc kubenswrapper[4863]: I1205 07:55:08.465834 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:55:08 crc kubenswrapper[4863]: I1205 07:55:08.466967 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"000ba37efa1b2b1040d5951e2c590682eae9abbee9a07c24487113d756b3b930"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:55:08 crc kubenswrapper[4863]: I1205 07:55:08.467060 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://000ba37efa1b2b1040d5951e2c590682eae9abbee9a07c24487113d756b3b930" gracePeriod=600 Dec 05 07:55:09 crc kubenswrapper[4863]: I1205 07:55:09.218928 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="000ba37efa1b2b1040d5951e2c590682eae9abbee9a07c24487113d756b3b930" exitCode=0 Dec 05 07:55:09 crc kubenswrapper[4863]: I1205 07:55:09.219006 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"000ba37efa1b2b1040d5951e2c590682eae9abbee9a07c24487113d756b3b930"} Dec 05 07:55:09 crc kubenswrapper[4863]: I1205 07:55:09.219346 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d"} Dec 05 07:55:09 crc kubenswrapper[4863]: I1205 07:55:09.219382 4863 scope.go:117] "RemoveContainer" containerID="06b639493dae9a6f5de9e0da40f1906092a89d804c5d80fd1147274860a44829" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.826610 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-5sdgn"] Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.838028 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-5sdgn"] Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.991585 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-5cx5k"] Dec 05 07:56:41 crc kubenswrapper[4863]: E1205 07:56:41.992013 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="extract-utilities" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.992035 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="extract-utilities" Dec 05 07:56:41 crc kubenswrapper[4863]: E1205 07:56:41.992066 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="registry-server" Dec 05 07:56:41 crc 
kubenswrapper[4863]: I1205 07:56:41.992080 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="registry-server" Dec 05 07:56:41 crc kubenswrapper[4863]: E1205 07:56:41.992130 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="extract-content" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.992144 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="extract-content" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.992418 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="2af4b2af-4511-4d0b-bd82-321c446dc8f0" containerName="registry-server" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.993191 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.998149 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.998528 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 05 07:56:41 crc kubenswrapper[4863]: I1205 07:56:41.998667 4863 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-nj7r9" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.000814 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.008672 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5cx5k"] Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.106246 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a40ac64-3c30-4983-969e-06b6e01cb398-crc-storage\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.106521 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a40ac64-3c30-4983-969e-06b6e01cb398-node-mnt\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.106648 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6x2\" (UniqueName: \"kubernetes.io/projected/1a40ac64-3c30-4983-969e-06b6e01cb398-kube-api-access-4h6x2\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.208622 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a40ac64-3c30-4983-969e-06b6e01cb398-crc-storage\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.208737 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: 
\"kubernetes.io/host-path/1a40ac64-3c30-4983-969e-06b6e01cb398-node-mnt\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.208817 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h6x2\" (UniqueName: \"kubernetes.io/projected/1a40ac64-3c30-4983-969e-06b6e01cb398-kube-api-access-4h6x2\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.209401 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a40ac64-3c30-4983-969e-06b6e01cb398-node-mnt\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.209999 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a40ac64-3c30-4983-969e-06b6e01cb398-crc-storage\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.233907 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6x2\" (UniqueName: \"kubernetes.io/projected/1a40ac64-3c30-4983-969e-06b6e01cb398-kube-api-access-4h6x2\") pod \"crc-storage-crc-5cx5k\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.329675 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.612570 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="460d9ee9-73c2-4b9e-9056-4ff13b350e64" path="/var/lib/kubelet/pods/460d9ee9-73c2-4b9e-9056-4ff13b350e64/volumes" Dec 05 07:56:42 crc kubenswrapper[4863]: I1205 07:56:42.839321 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5cx5k"] Dec 05 07:56:43 crc kubenswrapper[4863]: I1205 07:56:43.098783 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5cx5k" event={"ID":"1a40ac64-3c30-4983-969e-06b6e01cb398","Type":"ContainerStarted","Data":"3164eb057269aaceaee34b77a06e209579c65ffdbd8b1b43a6d2f46adf1375c9"} Dec 05 07:56:44 crc kubenswrapper[4863]: I1205 07:56:44.110455 4863 generic.go:334] "Generic (PLEG): container finished" podID="1a40ac64-3c30-4983-969e-06b6e01cb398" containerID="9552eb7ec5c85eabe1cc3075430115ef1d6b735dad966d372bd84da20e0fb3fa" exitCode=0 Dec 05 07:56:44 crc kubenswrapper[4863]: I1205 07:56:44.110573 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5cx5k" event={"ID":"1a40ac64-3c30-4983-969e-06b6e01cb398","Type":"ContainerDied","Data":"9552eb7ec5c85eabe1cc3075430115ef1d6b735dad966d372bd84da20e0fb3fa"} Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.398737 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.563688 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h6x2\" (UniqueName: \"kubernetes.io/projected/1a40ac64-3c30-4983-969e-06b6e01cb398-kube-api-access-4h6x2\") pod \"1a40ac64-3c30-4983-969e-06b6e01cb398\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.563858 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a40ac64-3c30-4983-969e-06b6e01cb398-crc-storage\") pod \"1a40ac64-3c30-4983-969e-06b6e01cb398\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.563896 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a40ac64-3c30-4983-969e-06b6e01cb398-node-mnt\") pod \"1a40ac64-3c30-4983-969e-06b6e01cb398\" (UID: \"1a40ac64-3c30-4983-969e-06b6e01cb398\") " Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.564206 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a40ac64-3c30-4983-969e-06b6e01cb398-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "1a40ac64-3c30-4983-969e-06b6e01cb398" (UID: "1a40ac64-3c30-4983-969e-06b6e01cb398"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.571700 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a40ac64-3c30-4983-969e-06b6e01cb398-kube-api-access-4h6x2" (OuterVolumeSpecName: "kube-api-access-4h6x2") pod "1a40ac64-3c30-4983-969e-06b6e01cb398" (UID: "1a40ac64-3c30-4983-969e-06b6e01cb398"). InnerVolumeSpecName "kube-api-access-4h6x2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.595866 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a40ac64-3c30-4983-969e-06b6e01cb398-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "1a40ac64-3c30-4983-969e-06b6e01cb398" (UID: "1a40ac64-3c30-4983-969e-06b6e01cb398"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.666343 4863 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a40ac64-3c30-4983-969e-06b6e01cb398-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.666531 4863 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a40ac64-3c30-4983-969e-06b6e01cb398-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 05 07:56:45 crc kubenswrapper[4863]: I1205 07:56:45.666556 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h6x2\" (UniqueName: \"kubernetes.io/projected/1a40ac64-3c30-4983-969e-06b6e01cb398-kube-api-access-4h6x2\") on node \"crc\" DevicePath \"\"" Dec 05 07:56:46 crc kubenswrapper[4863]: I1205 07:56:46.127870 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5cx5k" event={"ID":"1a40ac64-3c30-4983-969e-06b6e01cb398","Type":"ContainerDied","Data":"3164eb057269aaceaee34b77a06e209579c65ffdbd8b1b43a6d2f46adf1375c9"} Dec 05 07:56:46 crc kubenswrapper[4863]: I1205 07:56:46.127906 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3164eb057269aaceaee34b77a06e209579c65ffdbd8b1b43a6d2f46adf1375c9" Dec 05 07:56:46 crc kubenswrapper[4863]: I1205 07:56:46.128221 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5cx5k" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.042872 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-5cx5k"] Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.050068 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-5cx5k"] Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.180176 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-n9mb8"] Dec 05 07:56:48 crc kubenswrapper[4863]: E1205 07:56:48.180537 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a40ac64-3c30-4983-969e-06b6e01cb398" containerName="storage" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.180553 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a40ac64-3c30-4983-969e-06b6e01cb398" containerName="storage" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.180745 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a40ac64-3c30-4983-969e-06b6e01cb398" containerName="storage" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.181318 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.188645 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.188788 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.188870 4863 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-nj7r9" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.189546 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.198994 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-n9mb8"] Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.319901 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1c6807e1-dfd5-4785-860b-b8b362148cfe-node-mnt\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.319974 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1c6807e1-dfd5-4785-860b-b8b362148cfe-crc-storage\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.320089 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sggfc\" (UniqueName: \"kubernetes.io/projected/1c6807e1-dfd5-4785-860b-b8b362148cfe-kube-api-access-sggfc\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.421364 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1c6807e1-dfd5-4785-860b-b8b362148cfe-node-mnt\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.421452 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1c6807e1-dfd5-4785-860b-b8b362148cfe-crc-storage\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.421539 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sggfc\" (UniqueName: \"kubernetes.io/projected/1c6807e1-dfd5-4785-860b-b8b362148cfe-kube-api-access-sggfc\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.421933 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1c6807e1-dfd5-4785-860b-b8b362148cfe-node-mnt\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " 
pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.423440 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1c6807e1-dfd5-4785-860b-b8b362148cfe-crc-storage\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.461821 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sggfc\" (UniqueName: \"kubernetes.io/projected/1c6807e1-dfd5-4785-860b-b8b362148cfe-kube-api-access-sggfc\") pod \"crc-storage-crc-n9mb8\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.533021 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.618948 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a40ac64-3c30-4983-969e-06b6e01cb398" path="/var/lib/kubelet/pods/1a40ac64-3c30-4983-969e-06b6e01cb398/volumes" Dec 05 07:56:48 crc kubenswrapper[4863]: I1205 07:56:48.796791 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-n9mb8"] Dec 05 07:56:49 crc kubenswrapper[4863]: I1205 07:56:49.158575 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n9mb8" event={"ID":"1c6807e1-dfd5-4785-860b-b8b362148cfe","Type":"ContainerStarted","Data":"a29d498f25328429a1831df35d8cf6ac6d7b8305cb03f33cd37ee4f4af0ace77"} Dec 05 07:56:50 crc kubenswrapper[4863]: I1205 07:56:50.172351 4863 generic.go:334] "Generic (PLEG): container finished" podID="1c6807e1-dfd5-4785-860b-b8b362148cfe" containerID="fa9e194491823700f8680629644de22986d69c9c8dad6d84449c503f7366d651" exitCode=0 Dec 05 07:56:50 crc kubenswrapper[4863]: I1205 07:56:50.172404 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n9mb8" event={"ID":"1c6807e1-dfd5-4785-860b-b8b362148cfe","Type":"ContainerDied","Data":"fa9e194491823700f8680629644de22986d69c9c8dad6d84449c503f7366d651"} Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.545733 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.673570 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sggfc\" (UniqueName: \"kubernetes.io/projected/1c6807e1-dfd5-4785-860b-b8b362148cfe-kube-api-access-sggfc\") pod \"1c6807e1-dfd5-4785-860b-b8b362148cfe\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.673700 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1c6807e1-dfd5-4785-860b-b8b362148cfe-node-mnt\") pod \"1c6807e1-dfd5-4785-860b-b8b362148cfe\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.673807 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1c6807e1-dfd5-4785-860b-b8b362148cfe-crc-storage\") pod \"1c6807e1-dfd5-4785-860b-b8b362148cfe\" (UID: \"1c6807e1-dfd5-4785-860b-b8b362148cfe\") " Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.673861 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c6807e1-dfd5-4785-860b-b8b362148cfe-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "1c6807e1-dfd5-4785-860b-b8b362148cfe" (UID: "1c6807e1-dfd5-4785-860b-b8b362148cfe"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.674100 4863 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1c6807e1-dfd5-4785-860b-b8b362148cfe-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.681990 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c6807e1-dfd5-4785-860b-b8b362148cfe-kube-api-access-sggfc" (OuterVolumeSpecName: "kube-api-access-sggfc") pod "1c6807e1-dfd5-4785-860b-b8b362148cfe" (UID: "1c6807e1-dfd5-4785-860b-b8b362148cfe"). InnerVolumeSpecName "kube-api-access-sggfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.707636 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c6807e1-dfd5-4785-860b-b8b362148cfe-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "1c6807e1-dfd5-4785-860b-b8b362148cfe" (UID: "1c6807e1-dfd5-4785-860b-b8b362148cfe"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.775664 4863 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1c6807e1-dfd5-4785-860b-b8b362148cfe-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 05 07:56:51 crc kubenswrapper[4863]: I1205 07:56:51.775714 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sggfc\" (UniqueName: \"kubernetes.io/projected/1c6807e1-dfd5-4785-860b-b8b362148cfe-kube-api-access-sggfc\") on node \"crc\" DevicePath \"\"" Dec 05 07:56:52 crc kubenswrapper[4863]: I1205 07:56:52.196754 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n9mb8" event={"ID":"1c6807e1-dfd5-4785-860b-b8b362148cfe","Type":"ContainerDied","Data":"a29d498f25328429a1831df35d8cf6ac6d7b8305cb03f33cd37ee4f4af0ace77"} Dec 05 07:56:52 crc kubenswrapper[4863]: I1205 07:56:52.196820 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a29d498f25328429a1831df35d8cf6ac6d7b8305cb03f33cd37ee4f4af0ace77" Dec 05 07:56:52 crc kubenswrapper[4863]: I1205 07:56:52.196824 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n9mb8" Dec 05 07:57:08 crc kubenswrapper[4863]: I1205 07:57:08.464216 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:57:08 crc kubenswrapper[4863]: I1205 07:57:08.464952 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:57:25 crc kubenswrapper[4863]: I1205 07:57:25.881507 4863 scope.go:117] "RemoveContainer" containerID="4311b5859a4cf10ce2f96dd92ed9ea11a86b3f8963046c1d92a718a1b11a058e" Dec 05 07:57:38 crc kubenswrapper[4863]: I1205 07:57:38.463806 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:57:38 crc kubenswrapper[4863]: I1205 07:57:38.464374 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.464134 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.465007 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.465079 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.466114 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.466267 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" gracePeriod=600 Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.924921 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" exitCode=0 Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.924972 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d"} Dec 05 07:58:08 crc kubenswrapper[4863]: I1205 07:58:08.925452 4863 scope.go:117] "RemoveContainer" containerID="000ba37efa1b2b1040d5951e2c590682eae9abbee9a07c24487113d756b3b930" Dec 05 07:58:09 crc kubenswrapper[4863]: E1205 07:58:09.203854 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:58:09 crc kubenswrapper[4863]: I1205 07:58:09.942924 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:58:09 crc kubenswrapper[4863]: E1205 07:58:09.943457 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:58:25 crc kubenswrapper[4863]: I1205 07:58:25.602095 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:58:25 crc kubenswrapper[4863]: E1205 07:58:25.604445 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:58:36 crc kubenswrapper[4863]: I1205 07:58:36.602136 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:58:36 crc kubenswrapper[4863]: E1205 07:58:36.603268 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:58:51 crc kubenswrapper[4863]: I1205 07:58:51.602337 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:58:51 crc kubenswrapper[4863]: E1205 07:58:51.602993 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:59:06 crc kubenswrapper[4863]: I1205 07:59:06.601949 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:59:06 crc kubenswrapper[4863]: E1205 07:59:06.603193 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:59:20 crc kubenswrapper[4863]: I1205 07:59:20.604132 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:59:20 crc kubenswrapper[4863]: E1205 07:59:20.606066 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:59:32 crc kubenswrapper[4863]: I1205 07:59:32.607940 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:59:32 crc kubenswrapper[4863]: E1205 07:59:32.608775 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:59:44 crc kubenswrapper[4863]: I1205 07:59:44.602385 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:59:44 crc kubenswrapper[4863]: E1205 07:59:44.603542 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 07:59:56 crc kubenswrapper[4863]: I1205 07:59:56.602060 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 07:59:56 crc kubenswrapper[4863]: E1205 07:59:56.603171 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.169912 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc"] Dec 05 08:00:00 crc kubenswrapper[4863]: E1205 08:00:00.170504 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c6807e1-dfd5-4785-860b-b8b362148cfe" containerName="storage" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.170517 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c6807e1-dfd5-4785-860b-b8b362148cfe" containerName="storage" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.170659 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c6807e1-dfd5-4785-860b-b8b362148cfe" containerName="storage" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.171114 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.174174 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.174928 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.185982 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc"] Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.283773 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-config-volume\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.283988 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-secret-volume\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.284014 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w78jm\" (UniqueName: \"kubernetes.io/projected/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-kube-api-access-w78jm\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.386535 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-secret-volume\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.386606 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w78jm\" (UniqueName: \"kubernetes.io/projected/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-kube-api-access-w78jm\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.386663 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-config-volume\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.388219 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-config-volume\") pod 
\"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.399832 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-secret-volume\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.413313 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w78jm\" (UniqueName: \"kubernetes.io/projected/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-kube-api-access-w78jm\") pod \"collect-profiles-29415360-tt5vc\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.535370 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:00 crc kubenswrapper[4863]: I1205 08:00:00.976425 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc"] Dec 05 08:00:00 crc kubenswrapper[4863]: W1205 08:00:00.986641 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbce532d_1dd2_4f9e_8c6b_b44987ee5d37.slice/crio-93049dfa9587dd35c44dd3d5a1fea84cb660615ddcbaf0b2e2c054987aa07f5a WatchSource:0}: Error finding container 93049dfa9587dd35c44dd3d5a1fea84cb660615ddcbaf0b2e2c054987aa07f5a: Status 404 returned error can't find the container with id 93049dfa9587dd35c44dd3d5a1fea84cb660615ddcbaf0b2e2c054987aa07f5a Dec 05 08:00:01 crc kubenswrapper[4863]: I1205 08:00:01.988701 4863 generic.go:334] "Generic (PLEG): container finished" podID="dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" containerID="d2684deda275f29d345f6032a8d7be2cc2be4fd9716046b41d7d0aa731fc32d4" exitCode=0 Dec 05 08:00:01 crc kubenswrapper[4863]: I1205 08:00:01.988816 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" event={"ID":"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37","Type":"ContainerDied","Data":"d2684deda275f29d345f6032a8d7be2cc2be4fd9716046b41d7d0aa731fc32d4"} Dec 05 08:00:01 crc kubenswrapper[4863]: I1205 08:00:01.989086 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" event={"ID":"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37","Type":"ContainerStarted","Data":"93049dfa9587dd35c44dd3d5a1fea84cb660615ddcbaf0b2e2c054987aa07f5a"} Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.287564 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.426407 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-config-volume\") pod \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.426529 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w78jm\" (UniqueName: \"kubernetes.io/projected/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-kube-api-access-w78jm\") pod \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.426553 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-secret-volume\") pod \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\" (UID: \"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37\") " Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.428082 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-config-volume" (OuterVolumeSpecName: "config-volume") pod "dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" (UID: "dbce532d-1dd2-4f9e-8c6b-b44987ee5d37"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.432329 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-kube-api-access-w78jm" (OuterVolumeSpecName: "kube-api-access-w78jm") pod "dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" (UID: "dbce532d-1dd2-4f9e-8c6b-b44987ee5d37"). InnerVolumeSpecName "kube-api-access-w78jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.433812 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" (UID: "dbce532d-1dd2-4f9e-8c6b-b44987ee5d37"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.527669 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.527702 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w78jm\" (UniqueName: \"kubernetes.io/projected/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-kube-api-access-w78jm\") on node \"crc\" DevicePath \"\"" Dec 05 08:00:03 crc kubenswrapper[4863]: I1205 08:00:03.527714 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:00:04 crc kubenswrapper[4863]: I1205 08:00:04.004821 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" event={"ID":"dbce532d-1dd2-4f9e-8c6b-b44987ee5d37","Type":"ContainerDied","Data":"93049dfa9587dd35c44dd3d5a1fea84cb660615ddcbaf0b2e2c054987aa07f5a"} Dec 05 08:00:04 crc kubenswrapper[4863]: I1205 08:00:04.005137 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93049dfa9587dd35c44dd3d5a1fea84cb660615ddcbaf0b2e2c054987aa07f5a" Dec 05 08:00:04 crc kubenswrapper[4863]: I1205 08:00:04.005190 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc" Dec 05 08:00:04 crc kubenswrapper[4863]: I1205 08:00:04.401508 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw"] Dec 05 08:00:04 crc kubenswrapper[4863]: I1205 08:00:04.415561 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415315-g5fmw"] Dec 05 08:00:04 crc kubenswrapper[4863]: I1205 08:00:04.613325 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a930371-3d03-4e41-bad2-de418281ec35" path="/var/lib/kubelet/pods/8a930371-3d03-4e41-bad2-de418281ec35/volumes" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.373780 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7688886755-m2wqz"] Dec 05 08:00:05 crc kubenswrapper[4863]: E1205 08:00:05.374097 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" containerName="collect-profiles" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.374108 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" containerName="collect-profiles" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.374242 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" containerName="collect-profiles" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.384943 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.396898 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.396973 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7688886755-m2wqz"] Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.397527 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.397773 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-kwcrj" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.397928 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.398064 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.563048 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-config\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.563129 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-dns-svc\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.563178 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq7gz\" (UniqueName: \"kubernetes.io/projected/73114f45-ae83-48ed-ba4d-d7052a4113bc-kube-api-access-dq7gz\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.605558 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84496478f-knxcx"] Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.606658 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.619026 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84496478f-knxcx"] Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.663961 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-config\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.664021 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-dns-svc\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.664138 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq7gz\" (UniqueName: \"kubernetes.io/projected/73114f45-ae83-48ed-ba4d-d7052a4113bc-kube-api-access-dq7gz\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.664747 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-dns-svc\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.665363 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-config\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.681012 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq7gz\" (UniqueName: \"kubernetes.io/projected/73114f45-ae83-48ed-ba4d-d7052a4113bc-kube-api-access-dq7gz\") pod \"dnsmasq-dns-7688886755-m2wqz\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.704430 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.766190 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9rfv\" (UniqueName: \"kubernetes.io/projected/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-kube-api-access-n9rfv\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.766340 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-dns-svc\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.766369 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-config\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.869086 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-dns-svc\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.869125 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-config\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.869160 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9rfv\" (UniqueName: \"kubernetes.io/projected/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-kube-api-access-n9rfv\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.870299 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-dns-svc\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.872433 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-config\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 crc kubenswrapper[4863]: I1205 08:00:05.892405 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9rfv\" (UniqueName: \"kubernetes.io/projected/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-kube-api-access-n9rfv\") pod \"dnsmasq-dns-84496478f-knxcx\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:05 
crc kubenswrapper[4863]: I1205 08:00:05.923787 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.144496 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7688886755-m2wqz"] Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.153205 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.357320 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84496478f-knxcx"] Dec 05 08:00:06 crc kubenswrapper[4863]: W1205 08:00:06.361862 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6047ca6_b4f6_472f_b9f9_cf96bd6bbb35.slice/crio-31e4e84f66f0de2f80f47640f517530a4b72bbff3f0266bcf5d054b6290f1129 WatchSource:0}: Error finding container 31e4e84f66f0de2f80f47640f517530a4b72bbff3f0266bcf5d054b6290f1129: Status 404 returned error can't find the container with id 31e4e84f66f0de2f80f47640f517530a4b72bbff3f0266bcf5d054b6290f1129 Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.522321 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.524626 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.530245 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.530712 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.530967 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.531162 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.531355 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-z4dnc" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.546657 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685425 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d8d924f0-1309-4599-8911-036a6e1575db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685528 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685575 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685594 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzs4t\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-kube-api-access-gzs4t\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685633 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685668 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685698 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685724 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.685752 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d8d924f0-1309-4599-8911-036a6e1575db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.780040 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.783522 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.785809 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.785864 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.786067 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.786216 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.786294 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-9vm74" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.787571 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.787614 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzs4t\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-kube-api-access-gzs4t\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.787700 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.787762 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.787813 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.787848 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.787919 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d8d924f0-1309-4599-8911-036a6e1575db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " 
pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.788017 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d8d924f0-1309-4599-8911-036a6e1575db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.788074 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.789624 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.790494 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.790869 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.798788 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.802711 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.805757 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.805798 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fc250bde294d58e659f2178358a23d4754eae42b51ada5993213166c2643c9a0/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.810725 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.813537 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d8d924f0-1309-4599-8911-036a6e1575db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.814318 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d8d924f0-1309-4599-8911-036a6e1575db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.819555 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzs4t\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-kube-api-access-gzs4t\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.852746 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " pod="openstack/rabbitmq-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.888838 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.888887 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.888929 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.888975 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b7lx\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-kube-api-access-9b7lx\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.889007 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.889034 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.889055 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.889073 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.889251 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991093 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991156 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991184 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b7lx\" (UniqueName: 
\"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-kube-api-access-9b7lx\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991214 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991242 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991263 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991280 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991323 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991347 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.991957 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.992104 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.994531 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.994604 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5e46efeaff09c6dcf8581b6d734399b4aee049eff49ee67d45be5b1709347659/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.994828 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.996190 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.997591 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:06 crc kubenswrapper[4863]: I1205 08:00:06.998454 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.009513 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.013414 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b7lx\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-kube-api-access-9b7lx\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.028585 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.029079 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84496478f-knxcx" event={"ID":"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35","Type":"ContainerStarted","Data":"31e4e84f66f0de2f80f47640f517530a4b72bbff3f0266bcf5d054b6290f1129"} Dec 
05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.030714 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7688886755-m2wqz" event={"ID":"73114f45-ae83-48ed-ba4d-d7052a4113bc","Type":"ContainerStarted","Data":"ee2774d858553df97795ba267f7eefa5ccb0edabdb9ba02f6af553a9f2994d92"} Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.149078 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.166399 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.429291 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.432362 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.436530 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-nhkdk" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.436695 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.437390 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.438852 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.441147 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.443912 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.600535 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e708f8-fade-40dd-852c-8f6e08b8db54-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.600764 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-kolla-config\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.600836 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5bjt\" (UniqueName: \"kubernetes.io/projected/47e708f8-fade-40dd-852c-8f6e08b8db54-kube-api-access-r5bjt\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.600919 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-config-data-default\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " 
pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.601054 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/47e708f8-fade-40dd-852c-8f6e08b8db54-config-data-generated\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.601094 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-operator-scripts\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.601211 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:00:07 crc kubenswrapper[4863]: E1205 08:00:07.601459 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.601721 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.601767 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e708f8-fade-40dd-852c-8f6e08b8db54-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.648833 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703499 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703552 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e708f8-fade-40dd-852c-8f6e08b8db54-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703701 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e708f8-fade-40dd-852c-8f6e08b8db54-galera-tls-certs\") pod \"openstack-galera-0\" (UID: 
\"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703817 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-kolla-config\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703848 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5bjt\" (UniqueName: \"kubernetes.io/projected/47e708f8-fade-40dd-852c-8f6e08b8db54-kube-api-access-r5bjt\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703882 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/47e708f8-fade-40dd-852c-8f6e08b8db54-config-data-generated\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703904 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-config-data-default\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.703934 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-operator-scripts\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.705015 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/47e708f8-fade-40dd-852c-8f6e08b8db54-config-data-generated\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.705962 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-config-data-default\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.706812 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-operator-scripts\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.707194 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/47e708f8-fade-40dd-852c-8f6e08b8db54-kolla-config\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.708451 4863 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e708f8-fade-40dd-852c-8f6e08b8db54-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.711455 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.711499 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e84f58c9c855e2f335c31c2cfaa17d98806252db12f2a0470adba3acaeaae5d5/globalmount\"" pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.715422 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e708f8-fade-40dd-852c-8f6e08b8db54-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.737921 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5bjt\" (UniqueName: \"kubernetes.io/projected/47e708f8-fade-40dd-852c-8f6e08b8db54-kube-api-access-r5bjt\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.758872 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:00:07 crc kubenswrapper[4863]: I1205 08:00:07.781737 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b559155b-0d0f-460e-9e25-2fffa9f4d99f\") pod \"openstack-galera-0\" (UID: \"47e708f8-fade-40dd-852c-8f6e08b8db54\") " pod="openstack/openstack-galera-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.030890 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.032918 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.035403 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-p9xtf" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.035607 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.040021 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.059047 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.072512 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d8d924f0-1309-4599-8911-036a6e1575db","Type":"ContainerStarted","Data":"165d4adaede5502e3f94fe80ecb338c3fe1e80d7dfb00f78a7512eb9f098b47d"} Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.073528 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d6f49d0c-23b5-48ea-8bd8-8949f42d291c","Type":"ContainerStarted","Data":"e0c64466d3046e861a0dcae30e6508514a753f0913127f04b9ba72362d65eec4"} Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.112706 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/70237596-6b95-48be-825a-c52559057fe9-kolla-config\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.112789 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/70237596-6b95-48be-825a-c52559057fe9-config-data\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.112838 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvwq4\" (UniqueName: \"kubernetes.io/projected/70237596-6b95-48be-825a-c52559057fe9-kube-api-access-nvwq4\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.214579 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvwq4\" (UniqueName: \"kubernetes.io/projected/70237596-6b95-48be-825a-c52559057fe9-kube-api-access-nvwq4\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.214664 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/70237596-6b95-48be-825a-c52559057fe9-kolla-config\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.214747 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/70237596-6b95-48be-825a-c52559057fe9-config-data\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.216454 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/70237596-6b95-48be-825a-c52559057fe9-config-data\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.218235 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/70237596-6b95-48be-825a-c52559057fe9-kolla-config\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" 
Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.233254 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvwq4\" (UniqueName: \"kubernetes.io/projected/70237596-6b95-48be-825a-c52559057fe9-kube-api-access-nvwq4\") pod \"memcached-0\" (UID: \"70237596-6b95-48be-825a-c52559057fe9\") " pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.360140 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.599879 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.758177 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 08:00:08 crc kubenswrapper[4863]: W1205 08:00:08.762332 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70237596_6b95_48be_825a_c52559057fe9.slice/crio-4a75c4fe9823d5be596d7c7a6354f15dc39c109efe07f94b0c1d849af9436f57 WatchSource:0}: Error finding container 4a75c4fe9823d5be596d7c7a6354f15dc39c109efe07f94b0c1d849af9436f57: Status 404 returned error can't find the container with id 4a75c4fe9823d5be596d7c7a6354f15dc39c109efe07f94b0c1d849af9436f57 Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.967388 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.993537 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.994025 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.997861 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.998102 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-dq8fp" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.998830 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 08:00:08 crc kubenswrapper[4863]: I1205 08:00:08.998861 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.083353 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e708f8-fade-40dd-852c-8f6e08b8db54","Type":"ContainerStarted","Data":"897efd5278ba5f43190b9667f68d15056ba39834008c10c3b43fd46ab918b52e"} Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.084524 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"70237596-6b95-48be-825a-c52559057fe9","Type":"ContainerStarted","Data":"4a75c4fe9823d5be596d7c7a6354f15dc39c109efe07f94b0c1d849af9436f57"} Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127684 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127738 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127796 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hjfr\" (UniqueName: \"kubernetes.io/projected/9453bfe6-ca55-416b-8f61-76557fad23aa-kube-api-access-4hjfr\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127826 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127859 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9453bfe6-ca55-416b-8f61-76557fad23aa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127910 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9453bfe6-ca55-416b-8f61-76557fad23aa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127940 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.127968 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9453bfe6-ca55-416b-8f61-76557fad23aa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.229515 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9453bfe6-ca55-416b-8f61-76557fad23aa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230025 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230066 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9453bfe6-ca55-416b-8f61-76557fad23aa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230136 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230181 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230243 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hjfr\" (UniqueName: \"kubernetes.io/projected/9453bfe6-ca55-416b-8f61-76557fad23aa-kube-api-access-4hjfr\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230269 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230300 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9453bfe6-ca55-416b-8f61-76557fad23aa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230313 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9453bfe6-ca55-416b-8f61-76557fad23aa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.230870 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.231090 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.231374 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9453bfe6-ca55-416b-8f61-76557fad23aa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.233526 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.233566 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d6c09f4e4692f45f50a19723d6a75d24fb2ce7ecf8c7f7d2c08d46cfec8d254e/globalmount\"" pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.236685 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9453bfe6-ca55-416b-8f61-76557fad23aa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.245693 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9453bfe6-ca55-416b-8f61-76557fad23aa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.247985 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hjfr\" (UniqueName: \"kubernetes.io/projected/9453bfe6-ca55-416b-8f61-76557fad23aa-kube-api-access-4hjfr\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.277067 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f306601-9d5a-4eaf-9c59-eae1251ad8ca\") pod \"openstack-cell1-galera-0\" (UID: \"9453bfe6-ca55-416b-8f61-76557fad23aa\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.317391 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:09 crc kubenswrapper[4863]: I1205 08:00:09.799080 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 08:00:09 crc kubenswrapper[4863]: W1205 08:00:09.807406 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9453bfe6_ca55_416b_8f61_76557fad23aa.slice/crio-0250d1d1de8466a87547263d18946fdde7e2d80e58a338c4682f3fce13d8beaa WatchSource:0}: Error finding container 0250d1d1de8466a87547263d18946fdde7e2d80e58a338c4682f3fce13d8beaa: Status 404 returned error can't find the container with id 0250d1d1de8466a87547263d18946fdde7e2d80e58a338c4682f3fce13d8beaa Dec 05 08:00:10 crc kubenswrapper[4863]: I1205 08:00:10.092687 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9453bfe6-ca55-416b-8f61-76557fad23aa","Type":"ContainerStarted","Data":"0250d1d1de8466a87547263d18946fdde7e2d80e58a338c4682f3fce13d8beaa"} Dec 05 08:00:22 crc kubenswrapper[4863]: I1205 08:00:22.601508 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:00:22 crc kubenswrapper[4863]: E1205 08:00:22.602151 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:00:25 crc kubenswrapper[4863]: I1205 08:00:25.990377 4863 scope.go:117] "RemoveContainer" containerID="6c2d3e837075646e349ab77498405e42aeda4312c7aab1549f67e5e5f136e9f8" Dec 05 08:00:27 crc kubenswrapper[4863]: E1205 08:00:27.557289 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-rabbitmq:65066e8ca260a75886ae57f157049605" Dec 05 08:00:27 crc kubenswrapper[4863]: E1205 08:00:27.557380 4863 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-rabbitmq:65066e8ca260a75886ae57f157049605" Dec 05 08:00:27 crc kubenswrapper[4863]: E1205 08:00:27.557611 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-rabbitmq:65066e8ca260a75886ae57f157049605,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9b7lx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(d6f49d0c-23b5-48ea-8bd8-8949f42d291c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:00:27 crc kubenswrapper[4863]: E1205 08:00:27.558836 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" Dec 05 08:00:28 crc kubenswrapper[4863]: E1205 08:00:28.228170 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-rabbitmq:65066e8ca260a75886ae57f157049605\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" Dec 05 08:00:29 crc kubenswrapper[4863]: E1205 08:00:29.196155 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:65066e8ca260a75886ae57f157049605" Dec 05 08:00:29 crc kubenswrapper[4863]: E1205 08:00:29.196239 4863 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:65066e8ca260a75886ae57f157049605" Dec 05 08:00:29 crc kubenswrapper[4863]: E1205 08:00:29.196388 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:65066e8ca260a75886ae57f157049605,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4hjfr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(9453bfe6-ca55-416b-8f61-76557fad23aa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:00:29 crc kubenswrapper[4863]: E1205 08:00:29.197519 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="9453bfe6-ca55-416b-8f61-76557fad23aa" Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.274687 4863 generic.go:334] "Generic (PLEG): container finished" podID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerID="17768845c6aaeb3683edee13af2db9c1507f8d38aa676565999d5840b3dcc124" exitCode=0 Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.276216 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84496478f-knxcx" event={"ID":"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35","Type":"ContainerDied","Data":"17768845c6aaeb3683edee13af2db9c1507f8d38aa676565999d5840b3dcc124"} Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.287940 4863 generic.go:334] "Generic (PLEG): container finished" podID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerID="751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241" exitCode=0 Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.288019 4863 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/dnsmasq-dns-7688886755-m2wqz" event={"ID":"73114f45-ae83-48ed-ba4d-d7052a4113bc","Type":"ContainerDied","Data":"751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241"} Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.294987 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9453bfe6-ca55-416b-8f61-76557fad23aa","Type":"ContainerStarted","Data":"54d56bf733b6378f1c5f14fc4de122fb1c85f661df6cf259af70b0e4328d1b30"} Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.300295 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e708f8-fade-40dd-852c-8f6e08b8db54","Type":"ContainerStarted","Data":"a74699557ef2ac0604b437599ca1c17e9fbd7b57d3340e86daa9ca5cd97637e9"} Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.302699 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"70237596-6b95-48be-825a-c52559057fe9","Type":"ContainerStarted","Data":"c6da8a0aabb4a0757e29227d7d13fbbbfb9d6b0d0298267e6d31227377eddaab"} Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.302970 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 08:00:30 crc kubenswrapper[4863]: I1205 08:00:30.354964 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.893358541 podStartE2EDuration="22.354938718s" podCreationTimestamp="2025-12-05 08:00:08 +0000 UTC" firstStartedPulling="2025-12-05 08:00:08.765326321 +0000 UTC m=+4436.491323361" lastFinishedPulling="2025-12-05 08:00:29.226906478 +0000 UTC m=+4456.952903538" observedRunningTime="2025-12-05 08:00:30.352411467 +0000 UTC m=+4458.078408507" watchObservedRunningTime="2025-12-05 08:00:30.354938718 +0000 UTC m=+4458.080935788" Dec 05 08:00:31 crc kubenswrapper[4863]: I1205 08:00:31.329137 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d8d924f0-1309-4599-8911-036a6e1575db","Type":"ContainerStarted","Data":"5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0"} Dec 05 08:00:31 crc kubenswrapper[4863]: I1205 08:00:31.332116 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84496478f-knxcx" event={"ID":"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35","Type":"ContainerStarted","Data":"7044e0ad4415c0a7cde7b8d1881feff42518852f7a0a238d2a8362e13c69d6b9"} Dec 05 08:00:31 crc kubenswrapper[4863]: I1205 08:00:31.332227 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:31 crc kubenswrapper[4863]: I1205 08:00:31.334510 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7688886755-m2wqz" event={"ID":"73114f45-ae83-48ed-ba4d-d7052a4113bc","Type":"ContainerStarted","Data":"54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe"} Dec 05 08:00:31 crc kubenswrapper[4863]: I1205 08:00:31.418193 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7688886755-m2wqz" podStartSLOduration=3.275888318 podStartE2EDuration="26.418165778s" podCreationTimestamp="2025-12-05 08:00:05 +0000 UTC" firstStartedPulling="2025-12-05 08:00:06.152961653 +0000 UTC m=+4433.878958693" lastFinishedPulling="2025-12-05 08:00:29.295239113 +0000 UTC m=+4457.021236153" observedRunningTime="2025-12-05 08:00:31.393686747 +0000 UTC m=+4459.119683877" 
watchObservedRunningTime="2025-12-05 08:00:31.418165778 +0000 UTC m=+4459.144162858" Dec 05 08:00:31 crc kubenswrapper[4863]: I1205 08:00:31.429211 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84496478f-knxcx" podStartSLOduration=3.566017788 podStartE2EDuration="26.429190803s" podCreationTimestamp="2025-12-05 08:00:05 +0000 UTC" firstStartedPulling="2025-12-05 08:00:06.364726826 +0000 UTC m=+4434.090723876" lastFinishedPulling="2025-12-05 08:00:29.227899831 +0000 UTC m=+4456.953896891" observedRunningTime="2025-12-05 08:00:31.412221454 +0000 UTC m=+4459.138218534" watchObservedRunningTime="2025-12-05 08:00:31.429190803 +0000 UTC m=+4459.155187873" Dec 05 08:00:32 crc kubenswrapper[4863]: I1205 08:00:32.343093 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:33 crc kubenswrapper[4863]: E1205 08:00:33.281026 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9453bfe6_ca55_416b_8f61_76557fad23aa.slice/crio-54d56bf733b6378f1c5f14fc4de122fb1c85f661df6cf259af70b0e4328d1b30.scope\": RecentStats: unable to find data in memory cache]" Dec 05 08:00:33 crc kubenswrapper[4863]: I1205 08:00:33.352962 4863 generic.go:334] "Generic (PLEG): container finished" podID="9453bfe6-ca55-416b-8f61-76557fad23aa" containerID="54d56bf733b6378f1c5f14fc4de122fb1c85f661df6cf259af70b0e4328d1b30" exitCode=0 Dec 05 08:00:33 crc kubenswrapper[4863]: I1205 08:00:33.353068 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9453bfe6-ca55-416b-8f61-76557fad23aa","Type":"ContainerDied","Data":"54d56bf733b6378f1c5f14fc4de122fb1c85f661df6cf259af70b0e4328d1b30"} Dec 05 08:00:33 crc kubenswrapper[4863]: I1205 08:00:33.357753 4863 generic.go:334] "Generic (PLEG): container finished" podID="47e708f8-fade-40dd-852c-8f6e08b8db54" containerID="a74699557ef2ac0604b437599ca1c17e9fbd7b57d3340e86daa9ca5cd97637e9" exitCode=0 Dec 05 08:00:33 crc kubenswrapper[4863]: I1205 08:00:33.357842 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e708f8-fade-40dd-852c-8f6e08b8db54","Type":"ContainerDied","Data":"a74699557ef2ac0604b437599ca1c17e9fbd7b57d3340e86daa9ca5cd97637e9"} Dec 05 08:00:34 crc kubenswrapper[4863]: I1205 08:00:34.368216 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9453bfe6-ca55-416b-8f61-76557fad23aa","Type":"ContainerStarted","Data":"0a9e4bf7881e68901e4c6ddf6d9c58b390b48a3807895432f439db590b3ceb8a"} Dec 05 08:00:34 crc kubenswrapper[4863]: I1205 08:00:34.370901 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e708f8-fade-40dd-852c-8f6e08b8db54","Type":"ContainerStarted","Data":"e5d0b710443411304bf3555517b533cfece52ea66508b2e1a858a454adb42cc2"} Dec 05 08:00:34 crc kubenswrapper[4863]: I1205 08:00:34.407231 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223372009.447577 podStartE2EDuration="27.40719885s" podCreationTimestamp="2025-12-05 08:00:07 +0000 UTC" firstStartedPulling="2025-12-05 08:00:09.81032091 +0000 UTC m=+4437.536317950" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:00:34.399113226 +0000 UTC 
m=+4462.125110316" watchObservedRunningTime="2025-12-05 08:00:34.40719885 +0000 UTC m=+4462.133195940" Dec 05 08:00:34 crc kubenswrapper[4863]: I1205 08:00:34.427280 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.759176811 podStartE2EDuration="28.427259504s" podCreationTimestamp="2025-12-05 08:00:06 +0000 UTC" firstStartedPulling="2025-12-05 08:00:08.609850414 +0000 UTC m=+4436.335847454" lastFinishedPulling="2025-12-05 08:00:29.277933107 +0000 UTC m=+4457.003930147" observedRunningTime="2025-12-05 08:00:34.425200574 +0000 UTC m=+4462.151197664" watchObservedRunningTime="2025-12-05 08:00:34.427259504 +0000 UTC m=+4462.153256554" Dec 05 08:00:35 crc kubenswrapper[4863]: I1205 08:00:35.710718 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:35 crc kubenswrapper[4863]: I1205 08:00:35.927673 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:00:35 crc kubenswrapper[4863]: I1205 08:00:35.981686 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7688886755-m2wqz"] Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.397246 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7688886755-m2wqz" podUID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerName="dnsmasq-dns" containerID="cri-o://54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe" gracePeriod=10 Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.606512 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:00:36 crc kubenswrapper[4863]: E1205 08:00:36.606980 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.839616 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.897914 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-config\") pod \"73114f45-ae83-48ed-ba4d-d7052a4113bc\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.898016 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq7gz\" (UniqueName: \"kubernetes.io/projected/73114f45-ae83-48ed-ba4d-d7052a4113bc-kube-api-access-dq7gz\") pod \"73114f45-ae83-48ed-ba4d-d7052a4113bc\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.898069 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-dns-svc\") pod \"73114f45-ae83-48ed-ba4d-d7052a4113bc\" (UID: \"73114f45-ae83-48ed-ba4d-d7052a4113bc\") " Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.917597 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73114f45-ae83-48ed-ba4d-d7052a4113bc-kube-api-access-dq7gz" (OuterVolumeSpecName: "kube-api-access-dq7gz") pod "73114f45-ae83-48ed-ba4d-d7052a4113bc" (UID: "73114f45-ae83-48ed-ba4d-d7052a4113bc"). InnerVolumeSpecName "kube-api-access-dq7gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.946514 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "73114f45-ae83-48ed-ba4d-d7052a4113bc" (UID: "73114f45-ae83-48ed-ba4d-d7052a4113bc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:00:36 crc kubenswrapper[4863]: I1205 08:00:36.949971 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-config" (OuterVolumeSpecName: "config") pod "73114f45-ae83-48ed-ba4d-d7052a4113bc" (UID: "73114f45-ae83-48ed-ba4d-d7052a4113bc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.000624 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.000664 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq7gz\" (UniqueName: \"kubernetes.io/projected/73114f45-ae83-48ed-ba4d-d7052a4113bc-kube-api-access-dq7gz\") on node \"crc\" DevicePath \"\"" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.000683 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73114f45-ae83-48ed-ba4d-d7052a4113bc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.406286 4863 generic.go:334] "Generic (PLEG): container finished" podID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerID="54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe" exitCode=0 Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.406330 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7688886755-m2wqz" event={"ID":"73114f45-ae83-48ed-ba4d-d7052a4113bc","Type":"ContainerDied","Data":"54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe"} Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.406366 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7688886755-m2wqz" event={"ID":"73114f45-ae83-48ed-ba4d-d7052a4113bc","Type":"ContainerDied","Data":"ee2774d858553df97795ba267f7eefa5ccb0edabdb9ba02f6af553a9f2994d92"} Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.406392 4863 scope.go:117] "RemoveContainer" containerID="54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.406393 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7688886755-m2wqz" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.427534 4863 scope.go:117] "RemoveContainer" containerID="751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.459375 4863 scope.go:117] "RemoveContainer" containerID="54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.459789 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7688886755-m2wqz"] Dec 05 08:00:37 crc kubenswrapper[4863]: E1205 08:00:37.459982 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe\": container with ID starting with 54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe not found: ID does not exist" containerID="54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.460059 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe"} err="failed to get container status \"54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe\": rpc error: code = NotFound desc = could not find container \"54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe\": container with ID starting with 54f07a9ebdd53d7e21ef1fc466f3e0c0ad9f33e7fda64f9e64b2f293dff209fe not found: ID does not exist" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.460125 4863 scope.go:117] "RemoveContainer" containerID="751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241" Dec 05 08:00:37 crc kubenswrapper[4863]: E1205 08:00:37.460814 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241\": container with ID starting with 751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241 not found: ID does not exist" containerID="751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.460853 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241"} err="failed to get container status \"751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241\": rpc error: code = NotFound desc = could not find container \"751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241\": container with ID starting with 751eebaa7f7b02fde66b9aa7751f016b41a713e1984b991b3e67ae3d4be79241 not found: ID does not exist" Dec 05 08:00:37 crc kubenswrapper[4863]: I1205 08:00:37.468248 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7688886755-m2wqz"] Dec 05 08:00:38 crc kubenswrapper[4863]: I1205 08:00:38.060312 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 08:00:38 crc kubenswrapper[4863]: I1205 08:00:38.060784 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 08:00:38 crc kubenswrapper[4863]: I1205 08:00:38.362021 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/memcached-0" Dec 05 08:00:38 crc kubenswrapper[4863]: I1205 08:00:38.612497 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73114f45-ae83-48ed-ba4d-d7052a4113bc" path="/var/lib/kubelet/pods/73114f45-ae83-48ed-ba4d-d7052a4113bc/volumes" Dec 05 08:00:39 crc kubenswrapper[4863]: I1205 08:00:39.318169 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:39 crc kubenswrapper[4863]: I1205 08:00:39.318751 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:39 crc kubenswrapper[4863]: I1205 08:00:39.418762 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:39 crc kubenswrapper[4863]: I1205 08:00:39.557459 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 08:00:40 crc kubenswrapper[4863]: I1205 08:00:40.483731 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 08:00:40 crc kubenswrapper[4863]: I1205 08:00:40.571013 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 08:00:44 crc kubenswrapper[4863]: I1205 08:00:44.488631 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d6f49d0c-23b5-48ea-8bd8-8949f42d291c","Type":"ContainerStarted","Data":"e1638af8d3c94f0620bce44c547b052bbcf0323ce99ff8054dde3ed651f64fc5"} Dec 05 08:00:51 crc kubenswrapper[4863]: I1205 08:00:51.602641 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:00:51 crc kubenswrapper[4863]: E1205 08:00:51.603546 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:01:04 crc kubenswrapper[4863]: I1205 08:01:04.689175 4863 generic.go:334] "Generic (PLEG): container finished" podID="d8d924f0-1309-4599-8911-036a6e1575db" containerID="5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0" exitCode=0 Dec 05 08:01:04 crc kubenswrapper[4863]: I1205 08:01:04.689838 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d8d924f0-1309-4599-8911-036a6e1575db","Type":"ContainerDied","Data":"5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0"} Dec 05 08:01:05 crc kubenswrapper[4863]: I1205 08:01:05.602441 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:01:05 crc kubenswrapper[4863]: E1205 08:01:05.603300 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 
08:01:05 crc kubenswrapper[4863]: I1205 08:01:05.699782 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d8d924f0-1309-4599-8911-036a6e1575db","Type":"ContainerStarted","Data":"bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac"} Dec 05 08:01:05 crc kubenswrapper[4863]: I1205 08:01:05.700644 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 08:01:05 crc kubenswrapper[4863]: I1205 08:01:05.747789 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=39.188503139 podStartE2EDuration="1m0.747751775s" podCreationTimestamp="2025-12-05 08:00:05 +0000 UTC" firstStartedPulling="2025-12-05 08:00:07.662535498 +0000 UTC m=+4435.388532538" lastFinishedPulling="2025-12-05 08:00:29.221784114 +0000 UTC m=+4456.947781174" observedRunningTime="2025-12-05 08:01:05.733812319 +0000 UTC m=+4493.459809439" watchObservedRunningTime="2025-12-05 08:01:05.747751775 +0000 UTC m=+4493.473748855" Dec 05 08:01:16 crc kubenswrapper[4863]: I1205 08:01:16.820701 4863 generic.go:334] "Generic (PLEG): container finished" podID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerID="e1638af8d3c94f0620bce44c547b052bbcf0323ce99ff8054dde3ed651f64fc5" exitCode=0 Dec 05 08:01:16 crc kubenswrapper[4863]: I1205 08:01:16.820765 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d6f49d0c-23b5-48ea-8bd8-8949f42d291c","Type":"ContainerDied","Data":"e1638af8d3c94f0620bce44c547b052bbcf0323ce99ff8054dde3ed651f64fc5"} Dec 05 08:01:17 crc kubenswrapper[4863]: I1205 08:01:17.152267 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 08:01:17 crc kubenswrapper[4863]: I1205 08:01:17.832733 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d6f49d0c-23b5-48ea-8bd8-8949f42d291c","Type":"ContainerStarted","Data":"b554914c3732735adf72cb98ea31b87d340ccf87b1284bcbab6eb63cce3d1f0d"} Dec 05 08:01:17 crc kubenswrapper[4863]: I1205 08:01:17.832909 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:17 crc kubenswrapper[4863]: I1205 08:01:17.870450 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371963.98436 podStartE2EDuration="1m12.870416461s" podCreationTimestamp="2025-12-05 08:00:05 +0000 UTC" firstStartedPulling="2025-12-05 08:00:07.810341049 +0000 UTC m=+4435.536338079" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:01:17.852147211 +0000 UTC m=+4505.578144261" watchObservedRunningTime="2025-12-05 08:01:17.870416461 +0000 UTC m=+4505.596413541" Dec 05 08:01:18 crc kubenswrapper[4863]: I1205 08:01:18.602243 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:01:18 crc kubenswrapper[4863]: E1205 08:01:18.602979 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:01:27 crc kubenswrapper[4863]: I1205 08:01:27.169656 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.476872 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-778d75ccf7-9njb9"] Dec 05 08:01:30 crc kubenswrapper[4863]: E1205 08:01:30.479026 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerName="dnsmasq-dns" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.479122 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerName="dnsmasq-dns" Dec 05 08:01:30 crc kubenswrapper[4863]: E1205 08:01:30.479227 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerName="init" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.479310 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerName="init" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.479602 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="73114f45-ae83-48ed-ba4d-d7052a4113bc" containerName="dnsmasq-dns" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.480858 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.490217 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-778d75ccf7-9njb9"] Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.583747 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-dns-svc\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.583817 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-config\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.583844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twzqg\" (UniqueName: \"kubernetes.io/projected/d49ebb46-e0a0-403f-814a-04bd6408dc24-kube-api-access-twzqg\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.685449 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-dns-svc\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.685542 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-config\") pod 
\"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.685568 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twzqg\" (UniqueName: \"kubernetes.io/projected/d49ebb46-e0a0-403f-814a-04bd6408dc24-kube-api-access-twzqg\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.687007 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-dns-svc\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.687410 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-config\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.716507 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twzqg\" (UniqueName: \"kubernetes.io/projected/d49ebb46-e0a0-403f-814a-04bd6408dc24-kube-api-access-twzqg\") pod \"dnsmasq-dns-778d75ccf7-9njb9\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:30 crc kubenswrapper[4863]: I1205 08:01:30.832915 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:31 crc kubenswrapper[4863]: I1205 08:01:31.065382 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:01:31 crc kubenswrapper[4863]: I1205 08:01:31.279267 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-778d75ccf7-9njb9"] Dec 05 08:01:31 crc kubenswrapper[4863]: I1205 08:01:31.601259 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:01:31 crc kubenswrapper[4863]: E1205 08:01:31.601831 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:01:31 crc kubenswrapper[4863]: I1205 08:01:31.990752 4863 generic.go:334] "Generic (PLEG): container finished" podID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerID="fc82a5b7499b9c6ce777b8dcdb76a11ef4681c510dfb8feea0ac84a404f237e8" exitCode=0 Dec 05 08:01:31 crc kubenswrapper[4863]: I1205 08:01:31.990863 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" event={"ID":"d49ebb46-e0a0-403f-814a-04bd6408dc24","Type":"ContainerDied","Data":"fc82a5b7499b9c6ce777b8dcdb76a11ef4681c510dfb8feea0ac84a404f237e8"} Dec 05 08:01:31 crc kubenswrapper[4863]: I1205 08:01:31.990955 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" event={"ID":"d49ebb46-e0a0-403f-814a-04bd6408dc24","Type":"ContainerStarted","Data":"a4d4a2750a14817df44a6dd095a48551819ab7e384e7b4832c6ac0cea12412c8"} Dec 05 08:01:31 crc kubenswrapper[4863]: I1205 08:01:31.995691 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:01:33 crc kubenswrapper[4863]: I1205 08:01:33.001825 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" event={"ID":"d49ebb46-e0a0-403f-814a-04bd6408dc24","Type":"ContainerStarted","Data":"8d8fba718452c18c9f09275f4c23cbf14a01f1bf8d69e72e59f04fdc5ab25d2a"} Dec 05 08:01:33 crc kubenswrapper[4863]: I1205 08:01:33.002170 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:33 crc kubenswrapper[4863]: I1205 08:01:33.024627 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" podStartSLOduration=3.024606493 podStartE2EDuration="3.024606493s" podCreationTimestamp="2025-12-05 08:01:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:01:33.019350547 +0000 UTC m=+4520.745347587" watchObservedRunningTime="2025-12-05 08:01:33.024606493 +0000 UTC m=+4520.750603533" Dec 05 08:01:33 crc kubenswrapper[4863]: I1205 08:01:33.046536 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d8d924f0-1309-4599-8911-036a6e1575db" containerName="rabbitmq" containerID="cri-o://bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac" gracePeriod=604799 Dec 05 08:01:33 crc kubenswrapper[4863]: I1205 08:01:33.802205 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerName="rabbitmq" containerID="cri-o://b554914c3732735adf72cb98ea31b87d340ccf87b1284bcbab6eb63cce3d1f0d" gracePeriod=604799 Dec 05 08:01:37 crc kubenswrapper[4863]: I1205 08:01:37.150771 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d8d924f0-1309-4599-8911-036a6e1575db" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.232:5672: connect: connection refused" Dec 05 08:01:37 crc kubenswrapper[4863]: I1205 08:01:37.167940 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.233:5672: connect: connection refused" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.660204 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.679600 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-plugins-conf\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.679732 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzs4t\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-kube-api-access-gzs4t\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.679786 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-plugins\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.679820 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d8d924f0-1309-4599-8911-036a6e1575db-pod-info\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.679884 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-server-conf\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.679931 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-erlang-cookie\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.680032 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d8d924f0-1309-4599-8911-036a6e1575db-erlang-cookie-secret\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.680224 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.680272 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-confd\") pod \"d8d924f0-1309-4599-8911-036a6e1575db\" (UID: \"d8d924f0-1309-4599-8911-036a6e1575db\") " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.680661 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod 
"d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.680941 4863 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.682782 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.682990 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.697633 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d8d924f0-1309-4599-8911-036a6e1575db-pod-info" (OuterVolumeSpecName: "pod-info") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.700567 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8d924f0-1309-4599-8911-036a6e1575db-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.703594 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-kube-api-access-gzs4t" (OuterVolumeSpecName: "kube-api-access-gzs4t") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "kube-api-access-gzs4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.739537 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-server-conf" (OuterVolumeSpecName: "server-conf") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.757887 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86" (OuterVolumeSpecName: "persistence") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). 
InnerVolumeSpecName "pvc-23e37f8f-2698-4d94-b35d-161cae930c86". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.782212 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzs4t\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-kube-api-access-gzs4t\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.782249 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.782259 4863 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d8d924f0-1309-4599-8911-036a6e1575db-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.782270 4863 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d8d924f0-1309-4599-8911-036a6e1575db-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.782279 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.782288 4863 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d8d924f0-1309-4599-8911-036a6e1575db-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.782318 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") on node \"crc\" " Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.800539 4863 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.800712 4863 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-23e37f8f-2698-4d94-b35d-161cae930c86" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86") on node "crc" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.803062 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d8d924f0-1309-4599-8911-036a6e1575db" (UID: "d8d924f0-1309-4599-8911-036a6e1575db"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.884110 4863 reconciler_common.go:293] "Volume detached for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:39 crc kubenswrapper[4863]: I1205 08:01:39.884142 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d8d924f0-1309-4599-8911-036a6e1575db-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.079575 4863 generic.go:334] "Generic (PLEG): container finished" podID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerID="b554914c3732735adf72cb98ea31b87d340ccf87b1284bcbab6eb63cce3d1f0d" exitCode=0 Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.079648 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d6f49d0c-23b5-48ea-8bd8-8949f42d291c","Type":"ContainerDied","Data":"b554914c3732735adf72cb98ea31b87d340ccf87b1284bcbab6eb63cce3d1f0d"} Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.085747 4863 generic.go:334] "Generic (PLEG): container finished" podID="d8d924f0-1309-4599-8911-036a6e1575db" containerID="bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac" exitCode=0 Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.085788 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d8d924f0-1309-4599-8911-036a6e1575db","Type":"ContainerDied","Data":"bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac"} Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.085821 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d8d924f0-1309-4599-8911-036a6e1575db","Type":"ContainerDied","Data":"165d4adaede5502e3f94fe80ecb338c3fe1e80d7dfb00f78a7512eb9f098b47d"} Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.085818 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.085844 4863 scope.go:117] "RemoveContainer" containerID="bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.142082 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.151293 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.154732 4863 scope.go:117] "RemoveContainer" containerID="5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.187675 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:01:40 crc kubenswrapper[4863]: E1205 08:01:40.188137 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d924f0-1309-4599-8911-036a6e1575db" containerName="rabbitmq" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.188225 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d924f0-1309-4599-8911-036a6e1575db" containerName="rabbitmq" Dec 05 08:01:40 crc kubenswrapper[4863]: E1205 08:01:40.188311 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d924f0-1309-4599-8911-036a6e1575db" containerName="setup-container" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.188362 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d924f0-1309-4599-8911-036a6e1575db" containerName="setup-container" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.188560 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8d924f0-1309-4599-8911-036a6e1575db" containerName="rabbitmq" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.189394 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.196143 4863 scope.go:117] "RemoveContainer" containerID="bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac" Dec 05 08:01:40 crc kubenswrapper[4863]: E1205 08:01:40.198988 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac\": container with ID starting with bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac not found: ID does not exist" containerID="bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.199034 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac"} err="failed to get container status \"bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac\": rpc error: code = NotFound desc = could not find container \"bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac\": container with ID starting with bc16c736c128590c42497e0b50b8f6792dbdfd5fd863e08aabc82026db998eac not found: ID does not exist" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.199061 4863 scope.go:117] "RemoveContainer" containerID="5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.199078 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.199333 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.199363 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-z4dnc" Dec 05 08:01:40 crc kubenswrapper[4863]: E1205 08:01:40.200152 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0\": container with ID starting with 5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0 not found: ID does not exist" containerID="5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.200199 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0"} err="failed to get container status \"5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0\": rpc error: code = NotFound desc = could not find container \"5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0\": container with ID starting with 5dc143719c84b6b7b5a6334a23bfb9466d831f38d82e53a484de5498aee139d0 not found: ID does not exist" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.200168 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.200214 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.202336 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:01:40 crc 
kubenswrapper[4863]: I1205 08:01:40.390481 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf4xg\" (UniqueName: \"kubernetes.io/projected/b59a534f-9073-4dd4-aaae-75fcde18ffda-kube-api-access-zf4xg\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390792 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b59a534f-9073-4dd4-aaae-75fcde18ffda-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390821 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b59a534f-9073-4dd4-aaae-75fcde18ffda-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390856 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390906 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b59a534f-9073-4dd4-aaae-75fcde18ffda-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390930 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390945 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390969 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b59a534f-9073-4dd4-aaae-75fcde18ffda-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.390986 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc 
kubenswrapper[4863]: I1205 08:01:40.491909 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.491990 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b59a534f-9073-4dd4-aaae-75fcde18ffda-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492020 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492035 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492059 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b59a534f-9073-4dd4-aaae-75fcde18ffda-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492075 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492114 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf4xg\" (UniqueName: \"kubernetes.io/projected/b59a534f-9073-4dd4-aaae-75fcde18ffda-kube-api-access-zf4xg\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492142 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b59a534f-9073-4dd4-aaae-75fcde18ffda-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492168 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b59a534f-9073-4dd4-aaae-75fcde18ffda-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.492505 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.493014 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b59a534f-9073-4dd4-aaae-75fcde18ffda-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.493325 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.493366 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b59a534f-9073-4dd4-aaae-75fcde18ffda-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.495520 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.495556 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fc250bde294d58e659f2178358a23d4754eae42b51ada5993213166c2643c9a0/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.498285 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b59a534f-9073-4dd4-aaae-75fcde18ffda-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.498905 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b59a534f-9073-4dd4-aaae-75fcde18ffda-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.501972 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b59a534f-9073-4dd4-aaae-75fcde18ffda-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.512208 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf4xg\" (UniqueName: \"kubernetes.io/projected/b59a534f-9073-4dd4-aaae-75fcde18ffda-kube-api-access-zf4xg\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 
08:01:40.532461 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-23e37f8f-2698-4d94-b35d-161cae930c86\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-23e37f8f-2698-4d94-b35d-161cae930c86\") pod \"rabbitmq-server-0\" (UID: \"b59a534f-9073-4dd4-aaae-75fcde18ffda\") " pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.569899 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.593497 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9b7lx\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-kube-api-access-9b7lx\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.593540 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-erlang-cookie\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.593579 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-pod-info\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.593635 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-confd\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.594180 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.593667 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-plugins\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.594400 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-plugins-conf\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.594439 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-server-conf\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.594496 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-erlang-cookie-secret\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.594505 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.594611 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\" (UID: \"d6f49d0c-23b5-48ea-8bd8-8949f42d291c\") " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.594994 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.595670 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.595694 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.596462 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-kube-api-access-9b7lx" (OuterVolumeSpecName: "kube-api-access-9b7lx") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "kube-api-access-9b7lx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.598792 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.609085 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-pod-info" (OuterVolumeSpecName: "pod-info") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.636605 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-server-conf" (OuterVolumeSpecName: "server-conf") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.637811 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c" (OuterVolumeSpecName: "persistence") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "pvc-da845ed9-2c1a-458e-9f76-abe336869d8c". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.649015 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8d924f0-1309-4599-8911-036a6e1575db" path="/var/lib/kubelet/pods/d8d924f0-1309-4599-8911-036a6e1575db/volumes" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.696592 4863 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.696621 4863 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.696629 4863 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.696655 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") on node \"crc\" " Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.696670 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9b7lx\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-kube-api-access-9b7lx\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.696681 4863 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.726076 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d6f49d0c-23b5-48ea-8bd8-8949f42d291c" (UID: "d6f49d0c-23b5-48ea-8bd8-8949f42d291c"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.734127 4863 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.734292 4863 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-da845ed9-2c1a-458e-9f76-abe336869d8c" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c") on node "crc" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.797704 4863 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6f49d0c-23b5-48ea-8bd8-8949f42d291c-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.797746 4863 reconciler_common.go:293] "Volume detached for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.813323 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.834619 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.887056 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84496478f-knxcx"] Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.888665 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84496478f-knxcx" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerName="dnsmasq-dns" containerID="cri-o://7044e0ad4415c0a7cde7b8d1881feff42518852f7a0a238d2a8362e13c69d6b9" gracePeriod=10 Dec 05 08:01:40 crc kubenswrapper[4863]: I1205 08:01:40.926506 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-84496478f-knxcx" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.231:5353: connect: connection refused" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.098750 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d6f49d0c-23b5-48ea-8bd8-8949f42d291c","Type":"ContainerDied","Data":"e0c64466d3046e861a0dcae30e6508514a753f0913127f04b9ba72362d65eec4"} Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.098812 4863 scope.go:117] "RemoveContainer" containerID="b554914c3732735adf72cb98ea31b87d340ccf87b1284bcbab6eb63cce3d1f0d" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.099638 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.103627 4863 generic.go:334] "Generic (PLEG): container finished" podID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerID="7044e0ad4415c0a7cde7b8d1881feff42518852f7a0a238d2a8362e13c69d6b9" exitCode=0 Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.103688 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84496478f-knxcx" event={"ID":"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35","Type":"ContainerDied","Data":"7044e0ad4415c0a7cde7b8d1881feff42518852f7a0a238d2a8362e13c69d6b9"} Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.122562 4863 scope.go:117] "RemoveContainer" containerID="e1638af8d3c94f0620bce44c547b052bbcf0323ce99ff8054dde3ed651f64fc5" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.145851 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.184770 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.194532 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:01:41 crc kubenswrapper[4863]: E1205 08:01:41.195143 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerName="setup-container" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.195164 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerName="setup-container" Dec 05 08:01:41 crc kubenswrapper[4863]: E1205 08:01:41.195208 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerName="rabbitmq" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.195215 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerName="rabbitmq" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.195557 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" containerName="rabbitmq" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.197713 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.203219 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-9vm74" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.203348 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.203746 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.204950 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.205087 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211733 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211767 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f62d64b0-f790-4c51-9777-6141cbba6a79-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211822 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211843 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211871 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f62d64b0-f790-4c51-9777-6141cbba6a79-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211894 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f62d64b0-f790-4c51-9777-6141cbba6a79-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211922 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" 
(UniqueName: \"kubernetes.io/secret/f62d64b0-f790-4c51-9777-6141cbba6a79-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211956 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b72jj\" (UniqueName: \"kubernetes.io/projected/f62d64b0-f790-4c51-9777-6141cbba6a79-kube-api-access-b72jj\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.211978 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.220073 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.278033 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313213 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313264 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f62d64b0-f790-4c51-9777-6141cbba6a79-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313304 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f62d64b0-f790-4c51-9777-6141cbba6a79-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313327 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f62d64b0-f790-4c51-9777-6141cbba6a79-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313371 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b72jj\" (UniqueName: \"kubernetes.io/projected/f62d64b0-f790-4c51-9777-6141cbba6a79-kube-api-access-b72jj\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313394 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313428 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313458 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f62d64b0-f790-4c51-9777-6141cbba6a79-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.313548 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.315675 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.315857 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f62d64b0-f790-4c51-9777-6141cbba6a79-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.316221 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.316941 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f62d64b0-f790-4c51-9777-6141cbba6a79-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.317362 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f62d64b0-f790-4c51-9777-6141cbba6a79-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.318369 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.318412 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5e46efeaff09c6dcf8581b6d734399b4aee049eff49ee67d45be5b1709347659/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.322273 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f62d64b0-f790-4c51-9777-6141cbba6a79-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.322793 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f62d64b0-f790-4c51-9777-6141cbba6a79-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.330861 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b72jj\" (UniqueName: \"kubernetes.io/projected/f62d64b0-f790-4c51-9777-6141cbba6a79-kube-api-access-b72jj\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.355024 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da845ed9-2c1a-458e-9f76-abe336869d8c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f62d64b0-f790-4c51-9777-6141cbba6a79\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.469431 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.523447 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.616732 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-dns-svc\") pod \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.617140 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9rfv\" (UniqueName: \"kubernetes.io/projected/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-kube-api-access-n9rfv\") pod \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.617277 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-config\") pod \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\" (UID: \"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35\") " Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.651960 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-kube-api-access-n9rfv" (OuterVolumeSpecName: "kube-api-access-n9rfv") pod "d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" (UID: "d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35"). InnerVolumeSpecName "kube-api-access-n9rfv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.665122 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4ltnw"] Dec 05 08:01:41 crc kubenswrapper[4863]: E1205 08:01:41.665429 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerName="init" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.665446 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerName="init" Dec 05 08:01:41 crc kubenswrapper[4863]: E1205 08:01:41.665462 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerName="dnsmasq-dns" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.665467 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerName="dnsmasq-dns" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.665649 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" containerName="dnsmasq-dns" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.666760 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.678315 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" (UID: "d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.682178 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4ltnw"] Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.701615 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-config" (OuterVolumeSpecName: "config") pod "d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" (UID: "d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.719660 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9rfv\" (UniqueName: \"kubernetes.io/projected/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-kube-api-access-n9rfv\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.719697 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.719710 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.821411 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-utilities\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.821461 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7wqd\" (UniqueName: \"kubernetes.io/projected/1d16facf-0052-49ae-979c-2788e6023f4c-kube-api-access-x7wqd\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.821615 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-catalog-content\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.923419 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7wqd\" (UniqueName: \"kubernetes.io/projected/1d16facf-0052-49ae-979c-2788e6023f4c-kube-api-access-x7wqd\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.923630 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-catalog-content\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.923666 
4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-utilities\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.924085 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-catalog-content\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.924188 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-utilities\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:41 crc kubenswrapper[4863]: I1205 08:01:41.940386 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7wqd\" (UniqueName: \"kubernetes.io/projected/1d16facf-0052-49ae-979c-2788e6023f4c-kube-api-access-x7wqd\") pod \"certified-operators-4ltnw\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.003227 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.020534 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:01:42 crc kubenswrapper[4863]: W1205 08:01:42.028689 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf62d64b0_f790_4c51_9777_6141cbba6a79.slice/crio-23dbf243bb5dc8f9cf68ad219f3eec3237fb695c7d86be54162c0c6aa495f90f WatchSource:0}: Error finding container 23dbf243bb5dc8f9cf68ad219f3eec3237fb695c7d86be54162c0c6aa495f90f: Status 404 returned error can't find the container with id 23dbf243bb5dc8f9cf68ad219f3eec3237fb695c7d86be54162c0c6aa495f90f Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.111372 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f62d64b0-f790-4c51-9777-6141cbba6a79","Type":"ContainerStarted","Data":"23dbf243bb5dc8f9cf68ad219f3eec3237fb695c7d86be54162c0c6aa495f90f"} Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.113687 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84496478f-knxcx" Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.113741 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84496478f-knxcx" event={"ID":"d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35","Type":"ContainerDied","Data":"31e4e84f66f0de2f80f47640f517530a4b72bbff3f0266bcf5d054b6290f1129"} Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.113803 4863 scope.go:117] "RemoveContainer" containerID="7044e0ad4415c0a7cde7b8d1881feff42518852f7a0a238d2a8362e13c69d6b9" Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.122104 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b59a534f-9073-4dd4-aaae-75fcde18ffda","Type":"ContainerStarted","Data":"e9f43c59b4b05f5b7ebd2986097db6ebea013e3e4b72a62b82605e9b02757caf"} Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.153140 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84496478f-knxcx"] Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.161615 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84496478f-knxcx"] Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.191390 4863 scope.go:117] "RemoveContainer" containerID="17768845c6aaeb3683edee13af2db9c1507f8d38aa676565999d5840b3dcc124" Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.273321 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4ltnw"] Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.612204 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35" path="/var/lib/kubelet/pods/d6047ca6-b4f6-472f-b9f9-cf96bd6bbb35/volumes" Dec 05 08:01:42 crc kubenswrapper[4863]: I1205 08:01:42.612905 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6f49d0c-23b5-48ea-8bd8-8949f42d291c" path="/var/lib/kubelet/pods/d6f49d0c-23b5-48ea-8bd8-8949f42d291c/volumes" Dec 05 08:01:43 crc kubenswrapper[4863]: I1205 08:01:43.132541 4863 generic.go:334] "Generic (PLEG): container finished" podID="1d16facf-0052-49ae-979c-2788e6023f4c" containerID="b5f97070b6a4dad9be6e672d78a8b6fd2435a194248f93d4734943425a7b1f0e" exitCode=0 Dec 05 08:01:43 crc kubenswrapper[4863]: I1205 08:01:43.132591 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ltnw" event={"ID":"1d16facf-0052-49ae-979c-2788e6023f4c","Type":"ContainerDied","Data":"b5f97070b6a4dad9be6e672d78a8b6fd2435a194248f93d4734943425a7b1f0e"} Dec 05 08:01:43 crc kubenswrapper[4863]: I1205 08:01:43.132888 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ltnw" event={"ID":"1d16facf-0052-49ae-979c-2788e6023f4c","Type":"ContainerStarted","Data":"a1c6d34057fb8d11e2350aeb491a427129bcdd1e9b1e7cfdc6f2b1c66b76766b"} Dec 05 08:01:43 crc kubenswrapper[4863]: I1205 08:01:43.136162 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b59a534f-9073-4dd4-aaae-75fcde18ffda","Type":"ContainerStarted","Data":"c65f024338272772fcab3fb7aa22043be723fad4fded28e3615966f0dbe74879"} Dec 05 08:01:44 crc kubenswrapper[4863]: I1205 08:01:44.146203 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"f62d64b0-f790-4c51-9777-6141cbba6a79","Type":"ContainerStarted","Data":"f5bf85f2309ef85a5049ce08437fb12bac1234f50f643888c19334f41a1415fb"} Dec 05 08:01:44 crc kubenswrapper[4863]: I1205 08:01:44.149738 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ltnw" event={"ID":"1d16facf-0052-49ae-979c-2788e6023f4c","Type":"ContainerStarted","Data":"8318f37ff1f77dffb94f7c2f62a9ff630399073da260b1e53db22fea7bc299e4"} Dec 05 08:01:45 crc kubenswrapper[4863]: I1205 08:01:45.160584 4863 generic.go:334] "Generic (PLEG): container finished" podID="1d16facf-0052-49ae-979c-2788e6023f4c" containerID="8318f37ff1f77dffb94f7c2f62a9ff630399073da260b1e53db22fea7bc299e4" exitCode=0 Dec 05 08:01:45 crc kubenswrapper[4863]: I1205 08:01:45.160676 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ltnw" event={"ID":"1d16facf-0052-49ae-979c-2788e6023f4c","Type":"ContainerDied","Data":"8318f37ff1f77dffb94f7c2f62a9ff630399073da260b1e53db22fea7bc299e4"} Dec 05 08:01:45 crc kubenswrapper[4863]: I1205 08:01:45.602167 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:01:45 crc kubenswrapper[4863]: E1205 08:01:45.602366 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:01:46 crc kubenswrapper[4863]: I1205 08:01:46.174120 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ltnw" event={"ID":"1d16facf-0052-49ae-979c-2788e6023f4c","Type":"ContainerStarted","Data":"880486a6cff90c343fca00062f1089e06ecdf9114d50a145b272d0fbed6a1321"} Dec 05 08:01:46 crc kubenswrapper[4863]: I1205 08:01:46.196273 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4ltnw" podStartSLOduration=2.741146376 podStartE2EDuration="5.196253024s" podCreationTimestamp="2025-12-05 08:01:41 +0000 UTC" firstStartedPulling="2025-12-05 08:01:43.135630466 +0000 UTC m=+4530.861627506" lastFinishedPulling="2025-12-05 08:01:45.590737114 +0000 UTC m=+4533.316734154" observedRunningTime="2025-12-05 08:01:46.191064448 +0000 UTC m=+4533.917061528" watchObservedRunningTime="2025-12-05 08:01:46.196253024 +0000 UTC m=+4533.922250084" Dec 05 08:01:52 crc kubenswrapper[4863]: I1205 08:01:52.004051 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:52 crc kubenswrapper[4863]: I1205 08:01:52.004690 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:52 crc kubenswrapper[4863]: I1205 08:01:52.050350 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:52 crc kubenswrapper[4863]: I1205 08:01:52.303312 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:52 crc kubenswrapper[4863]: I1205 08:01:52.357932 4863 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/certified-operators-4ltnw"] Dec 05 08:01:54 crc kubenswrapper[4863]: I1205 08:01:54.230404 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4ltnw" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="registry-server" containerID="cri-o://880486a6cff90c343fca00062f1089e06ecdf9114d50a145b272d0fbed6a1321" gracePeriod=2 Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.245117 4863 generic.go:334] "Generic (PLEG): container finished" podID="1d16facf-0052-49ae-979c-2788e6023f4c" containerID="880486a6cff90c343fca00062f1089e06ecdf9114d50a145b272d0fbed6a1321" exitCode=0 Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.245243 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ltnw" event={"ID":"1d16facf-0052-49ae-979c-2788e6023f4c","Type":"ContainerDied","Data":"880486a6cff90c343fca00062f1089e06ecdf9114d50a145b272d0fbed6a1321"} Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.816912 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.850502 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7wqd\" (UniqueName: \"kubernetes.io/projected/1d16facf-0052-49ae-979c-2788e6023f4c-kube-api-access-x7wqd\") pod \"1d16facf-0052-49ae-979c-2788e6023f4c\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.850580 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-utilities\") pod \"1d16facf-0052-49ae-979c-2788e6023f4c\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.850632 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-catalog-content\") pod \"1d16facf-0052-49ae-979c-2788e6023f4c\" (UID: \"1d16facf-0052-49ae-979c-2788e6023f4c\") " Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.851657 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-utilities" (OuterVolumeSpecName: "utilities") pod "1d16facf-0052-49ae-979c-2788e6023f4c" (UID: "1d16facf-0052-49ae-979c-2788e6023f4c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.859333 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d16facf-0052-49ae-979c-2788e6023f4c-kube-api-access-x7wqd" (OuterVolumeSpecName: "kube-api-access-x7wqd") pod "1d16facf-0052-49ae-979c-2788e6023f4c" (UID: "1d16facf-0052-49ae-979c-2788e6023f4c"). InnerVolumeSpecName "kube-api-access-x7wqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.903845 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d16facf-0052-49ae-979c-2788e6023f4c" (UID: "1d16facf-0052-49ae-979c-2788e6023f4c"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.952154 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.952211 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d16facf-0052-49ae-979c-2788e6023f4c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:55 crc kubenswrapper[4863]: I1205 08:01:55.952221 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7wqd\" (UniqueName: \"kubernetes.io/projected/1d16facf-0052-49ae-979c-2788e6023f4c-kube-api-access-x7wqd\") on node \"crc\" DevicePath \"\"" Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.259159 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ltnw" event={"ID":"1d16facf-0052-49ae-979c-2788e6023f4c","Type":"ContainerDied","Data":"a1c6d34057fb8d11e2350aeb491a427129bcdd1e9b1e7cfdc6f2b1c66b76766b"} Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.259314 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4ltnw" Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.259354 4863 scope.go:117] "RemoveContainer" containerID="880486a6cff90c343fca00062f1089e06ecdf9114d50a145b272d0fbed6a1321" Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.289279 4863 scope.go:117] "RemoveContainer" containerID="8318f37ff1f77dffb94f7c2f62a9ff630399073da260b1e53db22fea7bc299e4" Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.311644 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4ltnw"] Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.319304 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4ltnw"] Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.331545 4863 scope.go:117] "RemoveContainer" containerID="b5f97070b6a4dad9be6e672d78a8b6fd2435a194248f93d4734943425a7b1f0e" Dec 05 08:01:56 crc kubenswrapper[4863]: I1205 08:01:56.634002 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" path="/var/lib/kubelet/pods/1d16facf-0052-49ae-979c-2788e6023f4c/volumes" Dec 05 08:01:58 crc kubenswrapper[4863]: I1205 08:01:58.603343 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:01:58 crc kubenswrapper[4863]: E1205 08:01:58.604147 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:02:12 crc kubenswrapper[4863]: I1205 08:02:12.613153 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:02:12 crc kubenswrapper[4863]: E1205 08:02:12.614351 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:02:15 crc kubenswrapper[4863]: I1205 08:02:15.434335 4863 generic.go:334] "Generic (PLEG): container finished" podID="b59a534f-9073-4dd4-aaae-75fcde18ffda" containerID="c65f024338272772fcab3fb7aa22043be723fad4fded28e3615966f0dbe74879" exitCode=0 Dec 05 08:02:15 crc kubenswrapper[4863]: I1205 08:02:15.434435 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b59a534f-9073-4dd4-aaae-75fcde18ffda","Type":"ContainerDied","Data":"c65f024338272772fcab3fb7aa22043be723fad4fded28e3615966f0dbe74879"} Dec 05 08:02:16 crc kubenswrapper[4863]: I1205 08:02:16.448646 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b59a534f-9073-4dd4-aaae-75fcde18ffda","Type":"ContainerStarted","Data":"0477ac89f6233021de1e8754fc134fc5c3c652e796706b948eb08cb79dd54a37"} Dec 05 08:02:16 crc kubenswrapper[4863]: I1205 08:02:16.449235 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 08:02:16 crc kubenswrapper[4863]: I1205 08:02:16.450972 4863 generic.go:334] "Generic (PLEG): container finished" podID="f62d64b0-f790-4c51-9777-6141cbba6a79" containerID="f5bf85f2309ef85a5049ce08437fb12bac1234f50f643888c19334f41a1415fb" exitCode=0 Dec 05 08:02:16 crc kubenswrapper[4863]: I1205 08:02:16.451012 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f62d64b0-f790-4c51-9777-6141cbba6a79","Type":"ContainerDied","Data":"f5bf85f2309ef85a5049ce08437fb12bac1234f50f643888c19334f41a1415fb"} Dec 05 08:02:16 crc kubenswrapper[4863]: I1205 08:02:16.475786 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.475762343 podStartE2EDuration="36.475762343s" podCreationTimestamp="2025-12-05 08:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:02:16.475114428 +0000 UTC m=+4564.201111458" watchObservedRunningTime="2025-12-05 08:02:16.475762343 +0000 UTC m=+4564.201759383" Dec 05 08:02:17 crc kubenswrapper[4863]: I1205 08:02:17.459804 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f62d64b0-f790-4c51-9777-6141cbba6a79","Type":"ContainerStarted","Data":"80c0336b161a382208719a80e6b3c870121558e575a231d3c30fac389e9b85a4"} Dec 05 08:02:17 crc kubenswrapper[4863]: I1205 08:02:17.460440 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:02:17 crc kubenswrapper[4863]: I1205 08:02:17.487583 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.487567124 podStartE2EDuration="36.487567124s" podCreationTimestamp="2025-12-05 08:01:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:02:17.479921089 +0000 UTC m=+4565.205918139" watchObservedRunningTime="2025-12-05 08:02:17.487567124 +0000 UTC 
m=+4565.213564164" Dec 05 08:02:23 crc kubenswrapper[4863]: I1205 08:02:23.601926 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:02:23 crc kubenswrapper[4863]: E1205 08:02:23.602913 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:02:30 crc kubenswrapper[4863]: I1205 08:02:30.818681 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 08:02:31 crc kubenswrapper[4863]: I1205 08:02:31.526778 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:02:34 crc kubenswrapper[4863]: I1205 08:02:34.602188 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:02:34 crc kubenswrapper[4863]: E1205 08:02:34.602709 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.052720 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Dec 05 08:02:43 crc kubenswrapper[4863]: E1205 08:02:43.053716 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="registry-server" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.053733 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="registry-server" Dec 05 08:02:43 crc kubenswrapper[4863]: E1205 08:02:43.053749 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="extract-utilities" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.053756 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="extract-utilities" Dec 05 08:02:43 crc kubenswrapper[4863]: E1205 08:02:43.053769 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="extract-content" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.053779 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="extract-content" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.053981 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d16facf-0052-49ae-979c-2788e6023f4c" containerName="registry-server" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.054591 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.057144 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jh4nh" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.071249 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.119776 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkbds\" (UniqueName: \"kubernetes.io/projected/73c8f417-f567-4a18-b9d9-b5cb2a682a12-kube-api-access-lkbds\") pod \"mariadb-client-1-default\" (UID: \"73c8f417-f567-4a18-b9d9-b5cb2a682a12\") " pod="openstack/mariadb-client-1-default" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.221108 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkbds\" (UniqueName: \"kubernetes.io/projected/73c8f417-f567-4a18-b9d9-b5cb2a682a12-kube-api-access-lkbds\") pod \"mariadb-client-1-default\" (UID: \"73c8f417-f567-4a18-b9d9-b5cb2a682a12\") " pod="openstack/mariadb-client-1-default" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.245862 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkbds\" (UniqueName: \"kubernetes.io/projected/73c8f417-f567-4a18-b9d9-b5cb2a682a12-kube-api-access-lkbds\") pod \"mariadb-client-1-default\" (UID: \"73c8f417-f567-4a18-b9d9-b5cb2a682a12\") " pod="openstack/mariadb-client-1-default" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.424278 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 05 08:02:43 crc kubenswrapper[4863]: I1205 08:02:43.995174 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 05 08:02:43 crc kubenswrapper[4863]: W1205 08:02:43.995961 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73c8f417_f567_4a18_b9d9_b5cb2a682a12.slice/crio-43cab8e3cd9ec2482c5a982db343cff9c5b23e3c99ab676019116d436fef335b WatchSource:0}: Error finding container 43cab8e3cd9ec2482c5a982db343cff9c5b23e3c99ab676019116d436fef335b: Status 404 returned error can't find the container with id 43cab8e3cd9ec2482c5a982db343cff9c5b23e3c99ab676019116d436fef335b Dec 05 08:02:44 crc kubenswrapper[4863]: I1205 08:02:44.694352 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"73c8f417-f567-4a18-b9d9-b5cb2a682a12","Type":"ContainerStarted","Data":"2f1b1c7d92ce4fe8119e3ff32f14c0720048341c2a479dd4f18edac7259e9664"} Dec 05 08:02:44 crc kubenswrapper[4863]: I1205 08:02:44.694791 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"73c8f417-f567-4a18-b9d9-b5cb2a682a12","Type":"ContainerStarted","Data":"43cab8e3cd9ec2482c5a982db343cff9c5b23e3c99ab676019116d436fef335b"} Dec 05 08:02:44 crc kubenswrapper[4863]: I1205 08:02:44.714192 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-1-default" podStartSLOduration=1.204334015 podStartE2EDuration="1.713892333s" podCreationTimestamp="2025-12-05 08:02:43 +0000 UTC" firstStartedPulling="2025-12-05 08:02:43.999631722 +0000 UTC m=+4591.725628782" lastFinishedPulling="2025-12-05 08:02:44.50919004 +0000 UTC m=+4592.235187100" 
observedRunningTime="2025-12-05 08:02:44.707759595 +0000 UTC m=+4592.433756635" watchObservedRunningTime="2025-12-05 08:02:44.713892333 +0000 UTC m=+4592.439889373" Dec 05 08:02:44 crc kubenswrapper[4863]: I1205 08:02:44.755222 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_73c8f417-f567-4a18-b9d9-b5cb2a682a12/mariadb-client-1-default/0.log" Dec 05 08:02:45 crc kubenswrapper[4863]: I1205 08:02:45.706612 4863 generic.go:334] "Generic (PLEG): container finished" podID="73c8f417-f567-4a18-b9d9-b5cb2a682a12" containerID="2f1b1c7d92ce4fe8119e3ff32f14c0720048341c2a479dd4f18edac7259e9664" exitCode=0 Dec 05 08:02:45 crc kubenswrapper[4863]: I1205 08:02:45.706672 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"73c8f417-f567-4a18-b9d9-b5cb2a682a12","Type":"ContainerDied","Data":"2f1b1c7d92ce4fe8119e3ff32f14c0720048341c2a479dd4f18edac7259e9664"} Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.131151 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.176515 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.185441 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.284422 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkbds\" (UniqueName: \"kubernetes.io/projected/73c8f417-f567-4a18-b9d9-b5cb2a682a12-kube-api-access-lkbds\") pod \"73c8f417-f567-4a18-b9d9-b5cb2a682a12\" (UID: \"73c8f417-f567-4a18-b9d9-b5cb2a682a12\") " Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.290541 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73c8f417-f567-4a18-b9d9-b5cb2a682a12-kube-api-access-lkbds" (OuterVolumeSpecName: "kube-api-access-lkbds") pod "73c8f417-f567-4a18-b9d9-b5cb2a682a12" (UID: "73c8f417-f567-4a18-b9d9-b5cb2a682a12"). InnerVolumeSpecName "kube-api-access-lkbds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.386769 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkbds\" (UniqueName: \"kubernetes.io/projected/73c8f417-f567-4a18-b9d9-b5cb2a682a12-kube-api-access-lkbds\") on node \"crc\" DevicePath \"\"" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.681604 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Dec 05 08:02:47 crc kubenswrapper[4863]: E1205 08:02:47.682166 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c8f417-f567-4a18-b9d9-b5cb2a682a12" containerName="mariadb-client-1-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.682201 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c8f417-f567-4a18-b9d9-b5cb2a682a12" containerName="mariadb-client-1-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.682612 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="73c8f417-f567-4a18-b9d9-b5cb2a682a12" containerName="mariadb-client-1-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.683533 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.696440 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.732001 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43cab8e3cd9ec2482c5a982db343cff9c5b23e3c99ab676019116d436fef335b" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.732319 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.792516 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbmm4\" (UniqueName: \"kubernetes.io/projected/a869cd5e-1363-46bb-95a8-85f6767b0d2e-kube-api-access-dbmm4\") pod \"mariadb-client-2-default\" (UID: \"a869cd5e-1363-46bb-95a8-85f6767b0d2e\") " pod="openstack/mariadb-client-2-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.894869 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbmm4\" (UniqueName: \"kubernetes.io/projected/a869cd5e-1363-46bb-95a8-85f6767b0d2e-kube-api-access-dbmm4\") pod \"mariadb-client-2-default\" (UID: \"a869cd5e-1363-46bb-95a8-85f6767b0d2e\") " pod="openstack/mariadb-client-2-default" Dec 05 08:02:47 crc kubenswrapper[4863]: I1205 08:02:47.918239 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbmm4\" (UniqueName: \"kubernetes.io/projected/a869cd5e-1363-46bb-95a8-85f6767b0d2e-kube-api-access-dbmm4\") pod \"mariadb-client-2-default\" (UID: \"a869cd5e-1363-46bb-95a8-85f6767b0d2e\") " pod="openstack/mariadb-client-2-default" Dec 05 08:02:48 crc kubenswrapper[4863]: I1205 08:02:48.007121 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 05 08:02:48 crc kubenswrapper[4863]: I1205 08:02:48.602332 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:02:48 crc kubenswrapper[4863]: E1205 08:02:48.602886 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:02:48 crc kubenswrapper[4863]: I1205 08:02:48.616557 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73c8f417-f567-4a18-b9d9-b5cb2a682a12" path="/var/lib/kubelet/pods/73c8f417-f567-4a18-b9d9-b5cb2a682a12/volumes" Dec 05 08:02:48 crc kubenswrapper[4863]: I1205 08:02:48.617665 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 05 08:02:48 crc kubenswrapper[4863]: W1205 08:02:48.685033 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda869cd5e_1363_46bb_95a8_85f6767b0d2e.slice/crio-1c5338c6d3780c58ddba7e3b6c775df2479f5affb1d2a2fadd5dbe3f454a86bb WatchSource:0}: Error finding container 1c5338c6d3780c58ddba7e3b6c775df2479f5affb1d2a2fadd5dbe3f454a86bb: Status 404 returned error can't find the container with id 1c5338c6d3780c58ddba7e3b6c775df2479f5affb1d2a2fadd5dbe3f454a86bb Dec 05 08:02:48 crc kubenswrapper[4863]: I1205 08:02:48.744608 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"a869cd5e-1363-46bb-95a8-85f6767b0d2e","Type":"ContainerStarted","Data":"1c5338c6d3780c58ddba7e3b6c775df2479f5affb1d2a2fadd5dbe3f454a86bb"} Dec 05 08:02:49 crc kubenswrapper[4863]: I1205 08:02:49.753941 4863 generic.go:334] "Generic (PLEG): container finished" podID="a869cd5e-1363-46bb-95a8-85f6767b0d2e" containerID="1864ad980ee328d5284de7f815980f89f122fccee5b5018a2657c71c0a53a4dc" exitCode=1 Dec 05 08:02:49 crc kubenswrapper[4863]: I1205 08:02:49.754057 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"a869cd5e-1363-46bb-95a8-85f6767b0d2e","Type":"ContainerDied","Data":"1864ad980ee328d5284de7f815980f89f122fccee5b5018a2657c71c0a53a4dc"} Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.255423 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.282036 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2-default_a869cd5e-1363-46bb-95a8-85f6767b0d2e/mariadb-client-2-default/0.log" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.315598 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.325240 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.347966 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbmm4\" (UniqueName: \"kubernetes.io/projected/a869cd5e-1363-46bb-95a8-85f6767b0d2e-kube-api-access-dbmm4\") pod \"a869cd5e-1363-46bb-95a8-85f6767b0d2e\" (UID: \"a869cd5e-1363-46bb-95a8-85f6767b0d2e\") " Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.354793 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a869cd5e-1363-46bb-95a8-85f6767b0d2e-kube-api-access-dbmm4" (OuterVolumeSpecName: "kube-api-access-dbmm4") pod "a869cd5e-1363-46bb-95a8-85f6767b0d2e" (UID: "a869cd5e-1363-46bb-95a8-85f6767b0d2e"). InnerVolumeSpecName "kube-api-access-dbmm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.451920 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbmm4\" (UniqueName: \"kubernetes.io/projected/a869cd5e-1363-46bb-95a8-85f6767b0d2e-kube-api-access-dbmm4\") on node \"crc\" DevicePath \"\"" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.773241 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c5338c6d3780c58ddba7e3b6c775df2479f5affb1d2a2fadd5dbe3f454a86bb" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.773408 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.841119 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Dec 05 08:02:51 crc kubenswrapper[4863]: E1205 08:02:51.841527 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a869cd5e-1363-46bb-95a8-85f6767b0d2e" containerName="mariadb-client-2-default" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.841550 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a869cd5e-1363-46bb-95a8-85f6767b0d2e" containerName="mariadb-client-2-default" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.841774 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a869cd5e-1363-46bb-95a8-85f6767b0d2e" containerName="mariadb-client-2-default" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.842394 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.845045 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jh4nh" Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.868098 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Dec 05 08:02:51 crc kubenswrapper[4863]: I1205 08:02:51.961320 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpp9h\" (UniqueName: \"kubernetes.io/projected/455824cc-3af1-4c83-8d81-f47caaa7b190-kube-api-access-wpp9h\") pod \"mariadb-client-1\" (UID: \"455824cc-3af1-4c83-8d81-f47caaa7b190\") " pod="openstack/mariadb-client-1" Dec 05 08:02:52 crc kubenswrapper[4863]: I1205 08:02:52.062401 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpp9h\" (UniqueName: \"kubernetes.io/projected/455824cc-3af1-4c83-8d81-f47caaa7b190-kube-api-access-wpp9h\") pod \"mariadb-client-1\" (UID: \"455824cc-3af1-4c83-8d81-f47caaa7b190\") " pod="openstack/mariadb-client-1" Dec 05 08:02:52 crc kubenswrapper[4863]: I1205 08:02:52.081971 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpp9h\" (UniqueName: \"kubernetes.io/projected/455824cc-3af1-4c83-8d81-f47caaa7b190-kube-api-access-wpp9h\") pod \"mariadb-client-1\" (UID: \"455824cc-3af1-4c83-8d81-f47caaa7b190\") " pod="openstack/mariadb-client-1" Dec 05 08:02:52 crc kubenswrapper[4863]: I1205 08:02:52.171717 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Dec 05 08:02:52 crc kubenswrapper[4863]: I1205 08:02:52.613917 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a869cd5e-1363-46bb-95a8-85f6767b0d2e" path="/var/lib/kubelet/pods/a869cd5e-1363-46bb-95a8-85f6767b0d2e/volumes" Dec 05 08:02:52 crc kubenswrapper[4863]: I1205 08:02:52.738414 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Dec 05 08:02:52 crc kubenswrapper[4863]: W1205 08:02:52.744131 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod455824cc_3af1_4c83_8d81_f47caaa7b190.slice/crio-574573ea473d7ccd7a1677127f3e0b3c8c6133b0859d1754cae78fedc3e29b88 WatchSource:0}: Error finding container 574573ea473d7ccd7a1677127f3e0b3c8c6133b0859d1754cae78fedc3e29b88: Status 404 returned error can't find the container with id 574573ea473d7ccd7a1677127f3e0b3c8c6133b0859d1754cae78fedc3e29b88 Dec 05 08:02:52 crc kubenswrapper[4863]: I1205 08:02:52.785539 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"455824cc-3af1-4c83-8d81-f47caaa7b190","Type":"ContainerStarted","Data":"574573ea473d7ccd7a1677127f3e0b3c8c6133b0859d1754cae78fedc3e29b88"} Dec 05 08:02:53 crc kubenswrapper[4863]: I1205 08:02:53.799059 4863 generic.go:334] "Generic (PLEG): container finished" podID="455824cc-3af1-4c83-8d81-f47caaa7b190" containerID="94919f1892b8e589ee2b35833115990d40b4afa985fe9889cb72d75011bf4676" exitCode=0 Dec 05 08:02:53 crc kubenswrapper[4863]: I1205 08:02:53.799406 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"455824cc-3af1-4c83-8d81-f47caaa7b190","Type":"ContainerDied","Data":"94919f1892b8e589ee2b35833115990d40b4afa985fe9889cb72d75011bf4676"} Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 
08:02:55.221195 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.246981 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_455824cc-3af1-4c83-8d81-f47caaa7b190/mariadb-client-1/0.log" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.280920 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.289363 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.315295 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpp9h\" (UniqueName: \"kubernetes.io/projected/455824cc-3af1-4c83-8d81-f47caaa7b190-kube-api-access-wpp9h\") pod \"455824cc-3af1-4c83-8d81-f47caaa7b190\" (UID: \"455824cc-3af1-4c83-8d81-f47caaa7b190\") " Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.321286 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/455824cc-3af1-4c83-8d81-f47caaa7b190-kube-api-access-wpp9h" (OuterVolumeSpecName: "kube-api-access-wpp9h") pod "455824cc-3af1-4c83-8d81-f47caaa7b190" (UID: "455824cc-3af1-4c83-8d81-f47caaa7b190"). InnerVolumeSpecName "kube-api-access-wpp9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.417983 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpp9h\" (UniqueName: \"kubernetes.io/projected/455824cc-3af1-4c83-8d81-f47caaa7b190-kube-api-access-wpp9h\") on node \"crc\" DevicePath \"\"" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.815247 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Dec 05 08:02:55 crc kubenswrapper[4863]: E1205 08:02:55.816262 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455824cc-3af1-4c83-8d81-f47caaa7b190" containerName="mariadb-client-1" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.816641 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="455824cc-3af1-4c83-8d81-f47caaa7b190" containerName="mariadb-client-1" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.817271 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="455824cc-3af1-4c83-8d81-f47caaa7b190" containerName="mariadb-client-1" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.818572 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.829104 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.881258 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="574573ea473d7ccd7a1677127f3e0b3c8c6133b0859d1754cae78fedc3e29b88" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.881752 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Dec 05 08:02:55 crc kubenswrapper[4863]: I1205 08:02:55.926629 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmbpd\" (UniqueName: \"kubernetes.io/projected/bb33ccdd-743a-44c0-a197-d302d804e8cb-kube-api-access-kmbpd\") pod \"mariadb-client-4-default\" (UID: \"bb33ccdd-743a-44c0-a197-d302d804e8cb\") " pod="openstack/mariadb-client-4-default" Dec 05 08:02:56 crc kubenswrapper[4863]: I1205 08:02:56.029522 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmbpd\" (UniqueName: \"kubernetes.io/projected/bb33ccdd-743a-44c0-a197-d302d804e8cb-kube-api-access-kmbpd\") pod \"mariadb-client-4-default\" (UID: \"bb33ccdd-743a-44c0-a197-d302d804e8cb\") " pod="openstack/mariadb-client-4-default" Dec 05 08:02:56 crc kubenswrapper[4863]: I1205 08:02:56.056012 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmbpd\" (UniqueName: \"kubernetes.io/projected/bb33ccdd-743a-44c0-a197-d302d804e8cb-kube-api-access-kmbpd\") pod \"mariadb-client-4-default\" (UID: \"bb33ccdd-743a-44c0-a197-d302d804e8cb\") " pod="openstack/mariadb-client-4-default" Dec 05 08:02:56 crc kubenswrapper[4863]: I1205 08:02:56.187263 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 05 08:02:56 crc kubenswrapper[4863]: I1205 08:02:56.613615 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="455824cc-3af1-4c83-8d81-f47caaa7b190" path="/var/lib/kubelet/pods/455824cc-3af1-4c83-8d81-f47caaa7b190/volumes" Dec 05 08:02:56 crc kubenswrapper[4863]: I1205 08:02:56.773123 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 05 08:02:56 crc kubenswrapper[4863]: W1205 08:02:56.806783 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb33ccdd_743a_44c0_a197_d302d804e8cb.slice/crio-3d56bc81fefda1c080b193adc99a0f507f66c27bffea24946c1b2bc1530136e6 WatchSource:0}: Error finding container 3d56bc81fefda1c080b193adc99a0f507f66c27bffea24946c1b2bc1530136e6: Status 404 returned error can't find the container with id 3d56bc81fefda1c080b193adc99a0f507f66c27bffea24946c1b2bc1530136e6 Dec 05 08:02:56 crc kubenswrapper[4863]: I1205 08:02:56.888952 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"bb33ccdd-743a-44c0-a197-d302d804e8cb","Type":"ContainerStarted","Data":"3d56bc81fefda1c080b193adc99a0f507f66c27bffea24946c1b2bc1530136e6"} Dec 05 08:02:57 crc kubenswrapper[4863]: I1205 08:02:57.899845 4863 generic.go:334] "Generic (PLEG): container finished" podID="bb33ccdd-743a-44c0-a197-d302d804e8cb" containerID="72cc767135e2e187720dc5122b36f1709a15edff3351b95db195de086f271ff5" exitCode=0 Dec 05 08:02:57 crc kubenswrapper[4863]: I1205 08:02:57.899907 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"bb33ccdd-743a-44c0-a197-d302d804e8cb","Type":"ContainerDied","Data":"72cc767135e2e187720dc5122b36f1709a15edff3351b95db195de086f271ff5"} Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.341443 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.367200 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_bb33ccdd-743a-44c0-a197-d302d804e8cb/mariadb-client-4-default/0.log" Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.387088 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmbpd\" (UniqueName: \"kubernetes.io/projected/bb33ccdd-743a-44c0-a197-d302d804e8cb-kube-api-access-kmbpd\") pod \"bb33ccdd-743a-44c0-a197-d302d804e8cb\" (UID: \"bb33ccdd-743a-44c0-a197-d302d804e8cb\") " Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.405056 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.414178 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb33ccdd-743a-44c0-a197-d302d804e8cb-kube-api-access-kmbpd" (OuterVolumeSpecName: "kube-api-access-kmbpd") pod "bb33ccdd-743a-44c0-a197-d302d804e8cb" (UID: "bb33ccdd-743a-44c0-a197-d302d804e8cb"). InnerVolumeSpecName "kube-api-access-kmbpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.415373 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.488680 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmbpd\" (UniqueName: \"kubernetes.io/projected/bb33ccdd-743a-44c0-a197-d302d804e8cb-kube-api-access-kmbpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.602125 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:02:59 crc kubenswrapper[4863]: E1205 08:02:59.602546 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.913489 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d56bc81fefda1c080b193adc99a0f507f66c27bffea24946c1b2bc1530136e6" Dec 05 08:02:59 crc kubenswrapper[4863]: I1205 08:02:59.913550 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 05 08:03:00 crc kubenswrapper[4863]: I1205 08:03:00.618698 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb33ccdd-743a-44c0-a197-d302d804e8cb" path="/var/lib/kubelet/pods/bb33ccdd-743a-44c0-a197-d302d804e8cb/volumes" Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.741246 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Dec 05 08:03:02 crc kubenswrapper[4863]: E1205 08:03:02.742546 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb33ccdd-743a-44c0-a197-d302d804e8cb" containerName="mariadb-client-4-default" Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.742562 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb33ccdd-743a-44c0-a197-d302d804e8cb" containerName="mariadb-client-4-default" Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.742719 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb33ccdd-743a-44c0-a197-d302d804e8cb" containerName="mariadb-client-4-default" Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.744174 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.757784 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jh4nh" Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.779622 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.855028 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6rtz\" (UniqueName: \"kubernetes.io/projected/8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9-kube-api-access-w6rtz\") pod \"mariadb-client-5-default\" (UID: \"8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9\") " pod="openstack/mariadb-client-5-default" Dec 05 08:03:02 crc kubenswrapper[4863]: I1205 08:03:02.957153 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6rtz\" (UniqueName: \"kubernetes.io/projected/8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9-kube-api-access-w6rtz\") pod \"mariadb-client-5-default\" (UID: \"8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9\") " pod="openstack/mariadb-client-5-default" Dec 05 08:03:03 crc kubenswrapper[4863]: I1205 08:03:03.000212 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6rtz\" (UniqueName: \"kubernetes.io/projected/8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9-kube-api-access-w6rtz\") pod \"mariadb-client-5-default\" (UID: \"8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9\") " pod="openstack/mariadb-client-5-default" Dec 05 08:03:03 crc kubenswrapper[4863]: I1205 08:03:03.095363 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 05 08:03:03 crc kubenswrapper[4863]: I1205 08:03:03.690872 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 05 08:03:03 crc kubenswrapper[4863]: I1205 08:03:03.954697 4863 generic.go:334] "Generic (PLEG): container finished" podID="8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9" containerID="983eeaf788b6587451ff71fda48c5f1d9306d8568b30e75fea3e812f45dfae3e" exitCode=0 Dec 05 08:03:03 crc kubenswrapper[4863]: I1205 08:03:03.954795 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9","Type":"ContainerDied","Data":"983eeaf788b6587451ff71fda48c5f1d9306d8568b30e75fea3e812f45dfae3e"} Dec 05 08:03:03 crc kubenswrapper[4863]: I1205 08:03:03.954968 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9","Type":"ContainerStarted","Data":"0c6ec815dec9cfce0778ef229d1d05f05404163cd88d65997c12c0ef8909e7b7"} Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.432642 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.451407 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9/mariadb-client-5-default/0.log" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.478099 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.484754 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.497755 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6rtz\" (UniqueName: \"kubernetes.io/projected/8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9-kube-api-access-w6rtz\") pod \"8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9\" (UID: \"8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9\") " Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.505667 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9-kube-api-access-w6rtz" (OuterVolumeSpecName: "kube-api-access-w6rtz") pod "8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9" (UID: "8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9"). InnerVolumeSpecName "kube-api-access-w6rtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.599760 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6rtz\" (UniqueName: \"kubernetes.io/projected/8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9-kube-api-access-w6rtz\") on node \"crc\" DevicePath \"\"" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.634887 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Dec 05 08:03:05 crc kubenswrapper[4863]: E1205 08:03:05.635289 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9" containerName="mariadb-client-5-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.635310 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9" containerName="mariadb-client-5-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.635458 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9" containerName="mariadb-client-5-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.636026 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.651289 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.701279 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kqlf\" (UniqueName: \"kubernetes.io/projected/67a41c74-234d-491c-8e1a-eb51ee26e87a-kube-api-access-6kqlf\") pod \"mariadb-client-6-default\" (UID: \"67a41c74-234d-491c-8e1a-eb51ee26e87a\") " pod="openstack/mariadb-client-6-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.802358 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kqlf\" (UniqueName: \"kubernetes.io/projected/67a41c74-234d-491c-8e1a-eb51ee26e87a-kube-api-access-6kqlf\") pod \"mariadb-client-6-default\" (UID: \"67a41c74-234d-491c-8e1a-eb51ee26e87a\") " pod="openstack/mariadb-client-6-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.824534 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kqlf\" (UniqueName: \"kubernetes.io/projected/67a41c74-234d-491c-8e1a-eb51ee26e87a-kube-api-access-6kqlf\") pod \"mariadb-client-6-default\" (UID: \"67a41c74-234d-491c-8e1a-eb51ee26e87a\") " pod="openstack/mariadb-client-6-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.968711 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.978669 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c6ec815dec9cfce0778ef229d1d05f05404163cd88d65997c12c0ef8909e7b7" Dec 05 08:03:05 crc kubenswrapper[4863]: I1205 08:03:05.978821 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 05 08:03:06 crc kubenswrapper[4863]: W1205 08:03:06.411520 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67a41c74_234d_491c_8e1a_eb51ee26e87a.slice/crio-e565805ebf7ea8d3e09367b056b58b52696c57b32246f04b58fe5614a56a1c0c WatchSource:0}: Error finding container e565805ebf7ea8d3e09367b056b58b52696c57b32246f04b58fe5614a56a1c0c: Status 404 returned error can't find the container with id e565805ebf7ea8d3e09367b056b58b52696c57b32246f04b58fe5614a56a1c0c Dec 05 08:03:06 crc kubenswrapper[4863]: I1205 08:03:06.411967 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 05 08:03:06 crc kubenswrapper[4863]: I1205 08:03:06.610188 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9" path="/var/lib/kubelet/pods/8d628a49-cd80-4ea5-9f6b-73f3c8cde8d9/volumes" Dec 05 08:03:06 crc kubenswrapper[4863]: I1205 08:03:06.991097 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"67a41c74-234d-491c-8e1a-eb51ee26e87a","Type":"ContainerStarted","Data":"f943a92df5ae2b6e8004a03276c49a95d02f918affe8b1e40d9e8c892106e554"} Dec 05 08:03:06 crc kubenswrapper[4863]: I1205 08:03:06.991158 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"67a41c74-234d-491c-8e1a-eb51ee26e87a","Type":"ContainerStarted","Data":"e565805ebf7ea8d3e09367b056b58b52696c57b32246f04b58fe5614a56a1c0c"} Dec 05 08:03:07 crc kubenswrapper[4863]: I1205 08:03:07.022791 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=2.022761542 podStartE2EDuration="2.022761542s" podCreationTimestamp="2025-12-05 08:03:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:03:07.013248122 +0000 UTC m=+4614.739245192" watchObservedRunningTime="2025-12-05 08:03:07.022761542 +0000 UTC m=+4614.748758622" Dec 05 08:03:07 crc kubenswrapper[4863]: I1205 08:03:07.102162 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_67a41c74-234d-491c-8e1a-eb51ee26e87a/mariadb-client-6-default/0.log" Dec 05 08:03:08 crc kubenswrapper[4863]: I1205 08:03:08.002829 4863 generic.go:334] "Generic (PLEG): container finished" podID="67a41c74-234d-491c-8e1a-eb51ee26e87a" containerID="f943a92df5ae2b6e8004a03276c49a95d02f918affe8b1e40d9e8c892106e554" exitCode=1 Dec 05 08:03:08 crc kubenswrapper[4863]: I1205 08:03:08.002954 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"67a41c74-234d-491c-8e1a-eb51ee26e87a","Type":"ContainerDied","Data":"f943a92df5ae2b6e8004a03276c49a95d02f918affe8b1e40d9e8c892106e554"} Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.439200 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.464297 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kqlf\" (UniqueName: \"kubernetes.io/projected/67a41c74-234d-491c-8e1a-eb51ee26e87a-kube-api-access-6kqlf\") pod \"67a41c74-234d-491c-8e1a-eb51ee26e87a\" (UID: \"67a41c74-234d-491c-8e1a-eb51ee26e87a\") " Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.471933 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67a41c74-234d-491c-8e1a-eb51ee26e87a-kube-api-access-6kqlf" (OuterVolumeSpecName: "kube-api-access-6kqlf") pod "67a41c74-234d-491c-8e1a-eb51ee26e87a" (UID: "67a41c74-234d-491c-8e1a-eb51ee26e87a"). InnerVolumeSpecName "kube-api-access-6kqlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.488911 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.498689 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.566732 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kqlf\" (UniqueName: \"kubernetes.io/projected/67a41c74-234d-491c-8e1a-eb51ee26e87a-kube-api-access-6kqlf\") on node \"crc\" DevicePath \"\"" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.616603 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Dec 05 08:03:09 crc kubenswrapper[4863]: E1205 08:03:09.616950 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67a41c74-234d-491c-8e1a-eb51ee26e87a" containerName="mariadb-client-6-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.616970 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="67a41c74-234d-491c-8e1a-eb51ee26e87a" containerName="mariadb-client-6-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.617182 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="67a41c74-234d-491c-8e1a-eb51ee26e87a" containerName="mariadb-client-6-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.617799 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.648232 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.668704 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwc8j\" (UniqueName: \"kubernetes.io/projected/6e92c086-58f0-4837-9440-fee9c2a46cef-kube-api-access-gwc8j\") pod \"mariadb-client-7-default\" (UID: \"6e92c086-58f0-4837-9440-fee9c2a46cef\") " pod="openstack/mariadb-client-7-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.770287 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwc8j\" (UniqueName: \"kubernetes.io/projected/6e92c086-58f0-4837-9440-fee9c2a46cef-kube-api-access-gwc8j\") pod \"mariadb-client-7-default\" (UID: \"6e92c086-58f0-4837-9440-fee9c2a46cef\") " pod="openstack/mariadb-client-7-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.800114 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwc8j\" (UniqueName: \"kubernetes.io/projected/6e92c086-58f0-4837-9440-fee9c2a46cef-kube-api-access-gwc8j\") pod \"mariadb-client-7-default\" (UID: \"6e92c086-58f0-4837-9440-fee9c2a46cef\") " pod="openstack/mariadb-client-7-default" Dec 05 08:03:09 crc kubenswrapper[4863]: I1205 08:03:09.945929 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 05 08:03:10 crc kubenswrapper[4863]: I1205 08:03:10.032670 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e565805ebf7ea8d3e09367b056b58b52696c57b32246f04b58fe5614a56a1c0c" Dec 05 08:03:10 crc kubenswrapper[4863]: I1205 08:03:10.032746 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 05 08:03:10 crc kubenswrapper[4863]: I1205 08:03:10.246434 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 05 08:03:10 crc kubenswrapper[4863]: W1205 08:03:10.253956 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e92c086_58f0_4837_9440_fee9c2a46cef.slice/crio-8d40a44361f95fb3e0524ec168229582660751970b5b1c1d8601e22e274a0e99 WatchSource:0}: Error finding container 8d40a44361f95fb3e0524ec168229582660751970b5b1c1d8601e22e274a0e99: Status 404 returned error can't find the container with id 8d40a44361f95fb3e0524ec168229582660751970b5b1c1d8601e22e274a0e99 Dec 05 08:03:10 crc kubenswrapper[4863]: I1205 08:03:10.614569 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67a41c74-234d-491c-8e1a-eb51ee26e87a" path="/var/lib/kubelet/pods/67a41c74-234d-491c-8e1a-eb51ee26e87a/volumes" Dec 05 08:03:11 crc kubenswrapper[4863]: I1205 08:03:11.046421 4863 generic.go:334] "Generic (PLEG): container finished" podID="6e92c086-58f0-4837-9440-fee9c2a46cef" containerID="c1d913364f4f88eaec7d6a08351ac010e4aa7077042366ae27aae980aa33351b" exitCode=0 Dec 05 08:03:11 crc kubenswrapper[4863]: I1205 08:03:11.046535 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"6e92c086-58f0-4837-9440-fee9c2a46cef","Type":"ContainerDied","Data":"c1d913364f4f88eaec7d6a08351ac010e4aa7077042366ae27aae980aa33351b"} Dec 05 08:03:11 crc kubenswrapper[4863]: I1205 08:03:11.047446 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"6e92c086-58f0-4837-9440-fee9c2a46cef","Type":"ContainerStarted","Data":"8d40a44361f95fb3e0524ec168229582660751970b5b1c1d8601e22e274a0e99"} Dec 05 08:03:11 crc kubenswrapper[4863]: I1205 08:03:11.602299 4863 scope.go:117] "RemoveContainer" containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.070300 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"e2d70452a6134efae8c51c9d279a146e4f17d7c210af1b1b99dfa2feb9821817"} Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.431988 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.456589 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_6e92c086-58f0-4837-9440-fee9c2a46cef/mariadb-client-7-default/0.log" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.489012 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.495396 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.514116 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwc8j\" (UniqueName: \"kubernetes.io/projected/6e92c086-58f0-4837-9440-fee9c2a46cef-kube-api-access-gwc8j\") pod \"6e92c086-58f0-4837-9440-fee9c2a46cef\" (UID: \"6e92c086-58f0-4837-9440-fee9c2a46cef\") " Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.526808 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e92c086-58f0-4837-9440-fee9c2a46cef-kube-api-access-gwc8j" (OuterVolumeSpecName: "kube-api-access-gwc8j") pod "6e92c086-58f0-4837-9440-fee9c2a46cef" (UID: "6e92c086-58f0-4837-9440-fee9c2a46cef"). InnerVolumeSpecName "kube-api-access-gwc8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.615260 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e92c086-58f0-4837-9440-fee9c2a46cef" path="/var/lib/kubelet/pods/6e92c086-58f0-4837-9440-fee9c2a46cef/volumes" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.618269 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwc8j\" (UniqueName: \"kubernetes.io/projected/6e92c086-58f0-4837-9440-fee9c2a46cef-kube-api-access-gwc8j\") on node \"crc\" DevicePath \"\"" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.631383 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Dec 05 08:03:12 crc kubenswrapper[4863]: E1205 08:03:12.631977 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e92c086-58f0-4837-9440-fee9c2a46cef" containerName="mariadb-client-7-default" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.632002 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e92c086-58f0-4837-9440-fee9c2a46cef" containerName="mariadb-client-7-default" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.632166 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e92c086-58f0-4837-9440-fee9c2a46cef" containerName="mariadb-client-7-default" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.632716 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.649250 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.719172 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm2r9\" (UniqueName: \"kubernetes.io/projected/4026902a-270b-424d-bf37-160fcb5312d8-kube-api-access-rm2r9\") pod \"mariadb-client-2\" (UID: \"4026902a-270b-424d-bf37-160fcb5312d8\") " pod="openstack/mariadb-client-2" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.820459 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm2r9\" (UniqueName: \"kubernetes.io/projected/4026902a-270b-424d-bf37-160fcb5312d8-kube-api-access-rm2r9\") pod \"mariadb-client-2\" (UID: \"4026902a-270b-424d-bf37-160fcb5312d8\") " pod="openstack/mariadb-client-2" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.860358 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm2r9\" (UniqueName: \"kubernetes.io/projected/4026902a-270b-424d-bf37-160fcb5312d8-kube-api-access-rm2r9\") pod \"mariadb-client-2\" (UID: \"4026902a-270b-424d-bf37-160fcb5312d8\") " pod="openstack/mariadb-client-2" Dec 05 08:03:12 crc kubenswrapper[4863]: I1205 08:03:12.959810 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Dec 05 08:03:13 crc kubenswrapper[4863]: I1205 08:03:13.086273 4863 scope.go:117] "RemoveContainer" containerID="c1d913364f4f88eaec7d6a08351ac010e4aa7077042366ae27aae980aa33351b" Dec 05 08:03:13 crc kubenswrapper[4863]: I1205 08:03:13.086885 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 05 08:03:13 crc kubenswrapper[4863]: I1205 08:03:13.542155 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Dec 05 08:03:13 crc kubenswrapper[4863]: W1205 08:03:13.546670 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4026902a_270b_424d_bf37_160fcb5312d8.slice/crio-837dce41236f18cdf31ec75c4193e7f2bfb0847dd777e66766ecf3220e6cdb47 WatchSource:0}: Error finding container 837dce41236f18cdf31ec75c4193e7f2bfb0847dd777e66766ecf3220e6cdb47: Status 404 returned error can't find the container with id 837dce41236f18cdf31ec75c4193e7f2bfb0847dd777e66766ecf3220e6cdb47 Dec 05 08:03:14 crc kubenswrapper[4863]: I1205 08:03:14.098510 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"4026902a-270b-424d-bf37-160fcb5312d8","Type":"ContainerStarted","Data":"837dce41236f18cdf31ec75c4193e7f2bfb0847dd777e66766ecf3220e6cdb47"} Dec 05 08:03:15 crc kubenswrapper[4863]: I1205 08:03:15.110619 4863 generic.go:334] "Generic (PLEG): container finished" podID="4026902a-270b-424d-bf37-160fcb5312d8" containerID="4621ba4465233bea85c27f223167ffa8331a949bdfa38b766dc107cb46d8ad64" exitCode=0 Dec 05 08:03:15 crc kubenswrapper[4863]: I1205 08:03:15.110739 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"4026902a-270b-424d-bf37-160fcb5312d8","Type":"ContainerDied","Data":"4621ba4465233bea85c27f223167ffa8331a949bdfa38b766dc107cb46d8ad64"} Dec 05 08:03:16 crc kubenswrapper[4863]: I1205 08:03:16.591211 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Dec 05 08:03:16 crc kubenswrapper[4863]: I1205 08:03:16.610053 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_4026902a-270b-424d-bf37-160fcb5312d8/mariadb-client-2/0.log" Dec 05 08:03:16 crc kubenswrapper[4863]: I1205 08:03:16.639501 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Dec 05 08:03:16 crc kubenswrapper[4863]: I1205 08:03:16.644927 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Dec 05 08:03:16 crc kubenswrapper[4863]: I1205 08:03:16.781861 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm2r9\" (UniqueName: \"kubernetes.io/projected/4026902a-270b-424d-bf37-160fcb5312d8-kube-api-access-rm2r9\") pod \"4026902a-270b-424d-bf37-160fcb5312d8\" (UID: \"4026902a-270b-424d-bf37-160fcb5312d8\") " Dec 05 08:03:16 crc kubenswrapper[4863]: I1205 08:03:16.797790 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4026902a-270b-424d-bf37-160fcb5312d8-kube-api-access-rm2r9" (OuterVolumeSpecName: "kube-api-access-rm2r9") pod "4026902a-270b-424d-bf37-160fcb5312d8" (UID: "4026902a-270b-424d-bf37-160fcb5312d8"). InnerVolumeSpecName "kube-api-access-rm2r9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:03:16 crc kubenswrapper[4863]: I1205 08:03:16.883909 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm2r9\" (UniqueName: \"kubernetes.io/projected/4026902a-270b-424d-bf37-160fcb5312d8-kube-api-access-rm2r9\") on node \"crc\" DevicePath \"\"" Dec 05 08:03:17 crc kubenswrapper[4863]: I1205 08:03:17.136524 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="837dce41236f18cdf31ec75c4193e7f2bfb0847dd777e66766ecf3220e6cdb47" Dec 05 08:03:17 crc kubenswrapper[4863]: I1205 08:03:17.136572 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Dec 05 08:03:18 crc kubenswrapper[4863]: I1205 08:03:18.614922 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4026902a-270b-424d-bf37-160fcb5312d8" path="/var/lib/kubelet/pods/4026902a-270b-424d-bf37-160fcb5312d8/volumes" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.130525 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ntpkz"] Dec 05 08:03:20 crc kubenswrapper[4863]: E1205 08:03:20.131162 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4026902a-270b-424d-bf37-160fcb5312d8" containerName="mariadb-client-2" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.131178 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4026902a-270b-424d-bf37-160fcb5312d8" containerName="mariadb-client-2" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.131384 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4026902a-270b-424d-bf37-160fcb5312d8" containerName="mariadb-client-2" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.132673 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.165984 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ntpkz"] Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.248699 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-catalog-content\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.248780 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-utilities\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.248812 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5bzn\" (UniqueName: \"kubernetes.io/projected/06def31b-336a-4b39-82a6-6cb12e0b05ef-kube-api-access-n5bzn\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.350149 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-catalog-content\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.350513 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-utilities\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.350657 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5bzn\" (UniqueName: \"kubernetes.io/projected/06def31b-336a-4b39-82a6-6cb12e0b05ef-kube-api-access-n5bzn\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.351182 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-catalog-content\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.351208 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-utilities\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.372016 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n5bzn\" (UniqueName: \"kubernetes.io/projected/06def31b-336a-4b39-82a6-6cb12e0b05ef-kube-api-access-n5bzn\") pod \"community-operators-ntpkz\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:20 crc kubenswrapper[4863]: I1205 08:03:20.461103 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:21 crc kubenswrapper[4863]: I1205 08:03:21.008509 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ntpkz"] Dec 05 08:03:21 crc kubenswrapper[4863]: I1205 08:03:21.174092 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntpkz" event={"ID":"06def31b-336a-4b39-82a6-6cb12e0b05ef","Type":"ContainerStarted","Data":"892f1b44f27aba3950046d08419f56a59a09c6df5ef109fbafe654c8496c4fd8"} Dec 05 08:03:22 crc kubenswrapper[4863]: I1205 08:03:22.189845 4863 generic.go:334] "Generic (PLEG): container finished" podID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerID="2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33" exitCode=0 Dec 05 08:03:22 crc kubenswrapper[4863]: I1205 08:03:22.189912 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntpkz" event={"ID":"06def31b-336a-4b39-82a6-6cb12e0b05ef","Type":"ContainerDied","Data":"2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33"} Dec 05 08:03:23 crc kubenswrapper[4863]: I1205 08:03:23.199864 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntpkz" event={"ID":"06def31b-336a-4b39-82a6-6cb12e0b05ef","Type":"ContainerStarted","Data":"1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405"} Dec 05 08:03:24 crc kubenswrapper[4863]: I1205 08:03:24.210460 4863 generic.go:334] "Generic (PLEG): container finished" podID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerID="1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405" exitCode=0 Dec 05 08:03:24 crc kubenswrapper[4863]: I1205 08:03:24.210548 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntpkz" event={"ID":"06def31b-336a-4b39-82a6-6cb12e0b05ef","Type":"ContainerDied","Data":"1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405"} Dec 05 08:03:25 crc kubenswrapper[4863]: I1205 08:03:25.221246 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntpkz" event={"ID":"06def31b-336a-4b39-82a6-6cb12e0b05ef","Type":"ContainerStarted","Data":"7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57"} Dec 05 08:03:25 crc kubenswrapper[4863]: I1205 08:03:25.253131 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ntpkz" podStartSLOduration=2.8120814100000002 podStartE2EDuration="5.253111119s" podCreationTimestamp="2025-12-05 08:03:20 +0000 UTC" firstStartedPulling="2025-12-05 08:03:22.192602793 +0000 UTC m=+4629.918599863" lastFinishedPulling="2025-12-05 08:03:24.633632492 +0000 UTC m=+4632.359629572" observedRunningTime="2025-12-05 08:03:25.245708111 +0000 UTC m=+4632.971705171" watchObservedRunningTime="2025-12-05 08:03:25.253111119 +0000 UTC m=+4632.979108169" Dec 05 08:03:29 crc kubenswrapper[4863]: I1205 08:03:29.353340 4863 scope.go:117] "RemoveContainer" 
containerID="9552eb7ec5c85eabe1cc3075430115ef1d6b735dad966d372bd84da20e0fb3fa" Dec 05 08:03:30 crc kubenswrapper[4863]: I1205 08:03:30.461550 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:30 crc kubenswrapper[4863]: I1205 08:03:30.462040 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:30 crc kubenswrapper[4863]: I1205 08:03:30.533654 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:31 crc kubenswrapper[4863]: I1205 08:03:31.368161 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:31 crc kubenswrapper[4863]: I1205 08:03:31.432800 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ntpkz"] Dec 05 08:03:33 crc kubenswrapper[4863]: I1205 08:03:33.310235 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ntpkz" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="registry-server" containerID="cri-o://7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57" gracePeriod=2 Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.303131 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.327316 4863 generic.go:334] "Generic (PLEG): container finished" podID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerID="7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57" exitCode=0 Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.327369 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntpkz" event={"ID":"06def31b-336a-4b39-82a6-6cb12e0b05ef","Type":"ContainerDied","Data":"7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57"} Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.327404 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntpkz" event={"ID":"06def31b-336a-4b39-82a6-6cb12e0b05ef","Type":"ContainerDied","Data":"892f1b44f27aba3950046d08419f56a59a09c6df5ef109fbafe654c8496c4fd8"} Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.327424 4863 scope.go:117] "RemoveContainer" containerID="7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.327544 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ntpkz" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.359986 4863 scope.go:117] "RemoveContainer" containerID="1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.388730 4863 scope.go:117] "RemoveContainer" containerID="2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.412405 4863 scope.go:117] "RemoveContainer" containerID="7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57" Dec 05 08:03:34 crc kubenswrapper[4863]: E1205 08:03:34.413023 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57\": container with ID starting with 7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57 not found: ID does not exist" containerID="7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.413123 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57"} err="failed to get container status \"7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57\": rpc error: code = NotFound desc = could not find container \"7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57\": container with ID starting with 7f347e1849edb17675e3b7f739b242f5a38bc1ad61165e60a5a09ea04c57fa57 not found: ID does not exist" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.413177 4863 scope.go:117] "RemoveContainer" containerID="1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.413490 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-utilities\") pod \"06def31b-336a-4b39-82a6-6cb12e0b05ef\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.413575 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5bzn\" (UniqueName: \"kubernetes.io/projected/06def31b-336a-4b39-82a6-6cb12e0b05ef-kube-api-access-n5bzn\") pod \"06def31b-336a-4b39-82a6-6cb12e0b05ef\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " Dec 05 08:03:34 crc kubenswrapper[4863]: E1205 08:03:34.413648 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405\": container with ID starting with 1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405 not found: ID does not exist" containerID="1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.413692 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405"} err="failed to get container status \"1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405\": rpc error: code = NotFound desc = could not find container \"1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405\": container with ID starting with 
1b1a1db30d3e01cc2745e8ade3c3e6865f41217a74200fba9b81639de1ba3405 not found: ID does not exist" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.413728 4863 scope.go:117] "RemoveContainer" containerID="2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.413733 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-catalog-content\") pod \"06def31b-336a-4b39-82a6-6cb12e0b05ef\" (UID: \"06def31b-336a-4b39-82a6-6cb12e0b05ef\") " Dec 05 08:03:34 crc kubenswrapper[4863]: E1205 08:03:34.414093 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33\": container with ID starting with 2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33 not found: ID does not exist" containerID="2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.414140 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33"} err="failed to get container status \"2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33\": rpc error: code = NotFound desc = could not find container \"2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33\": container with ID starting with 2cfc0c4f05057b519be0b6b22909e15b6e6e4b637437e6bcdd327f8e95359b33 not found: ID does not exist" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.414825 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-utilities" (OuterVolumeSpecName: "utilities") pod "06def31b-336a-4b39-82a6-6cb12e0b05ef" (UID: "06def31b-336a-4b39-82a6-6cb12e0b05ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.423651 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06def31b-336a-4b39-82a6-6cb12e0b05ef-kube-api-access-n5bzn" (OuterVolumeSpecName: "kube-api-access-n5bzn") pod "06def31b-336a-4b39-82a6-6cb12e0b05ef" (UID: "06def31b-336a-4b39-82a6-6cb12e0b05ef"). InnerVolumeSpecName "kube-api-access-n5bzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.472050 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06def31b-336a-4b39-82a6-6cb12e0b05ef" (UID: "06def31b-336a-4b39-82a6-6cb12e0b05ef"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.514875 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.514935 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06def31b-336a-4b39-82a6-6cb12e0b05ef-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.514947 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5bzn\" (UniqueName: \"kubernetes.io/projected/06def31b-336a-4b39-82a6-6cb12e0b05ef-kube-api-access-n5bzn\") on node \"crc\" DevicePath \"\"" Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.667342 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ntpkz"] Dec 05 08:03:34 crc kubenswrapper[4863]: I1205 08:03:34.687945 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ntpkz"] Dec 05 08:03:36 crc kubenswrapper[4863]: I1205 08:03:36.615846 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" path="/var/lib/kubelet/pods/06def31b-336a-4b39-82a6-6cb12e0b05ef/volumes" Dec 05 08:05:38 crc kubenswrapper[4863]: I1205 08:05:38.464139 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:05:38 crc kubenswrapper[4863]: I1205 08:05:38.464932 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.634905 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Dec 05 08:05:55 crc kubenswrapper[4863]: E1205 08:05:55.635798 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="registry-server" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.635813 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="registry-server" Dec 05 08:05:55 crc kubenswrapper[4863]: E1205 08:05:55.635828 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="extract-content" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.635834 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="extract-content" Dec 05 08:05:55 crc kubenswrapper[4863]: E1205 08:05:55.635846 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="extract-utilities" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.635870 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="extract-utilities" 
Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.636002 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="06def31b-336a-4b39-82a6-6cb12e0b05ef" containerName="registry-server" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.636517 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.641136 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jh4nh" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.652427 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.752101 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkhl8\" (UniqueName: \"kubernetes.io/projected/970303aa-1c1f-4a60-9a92-a3d753caecef-kube-api-access-xkhl8\") pod \"mariadb-copy-data\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.752157 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\") pod \"mariadb-copy-data\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.853950 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkhl8\" (UniqueName: \"kubernetes.io/projected/970303aa-1c1f-4a60-9a92-a3d753caecef-kube-api-access-xkhl8\") pod \"mariadb-copy-data\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.854085 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\") pod \"mariadb-copy-data\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.858052 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.858389 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\") pod \"mariadb-copy-data\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9613d26243de0bf7c45818819eaa2f22f7fb6d8912a4492e0b0052905d089c8b/globalmount\"" pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.882980 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkhl8\" (UniqueName: \"kubernetes.io/projected/970303aa-1c1f-4a60-9a92-a3d753caecef-kube-api-access-xkhl8\") pod \"mariadb-copy-data\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.913898 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\") pod \"mariadb-copy-data\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " pod="openstack/mariadb-copy-data" Dec 05 08:05:55 crc kubenswrapper[4863]: I1205 08:05:55.959727 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Dec 05 08:05:56 crc kubenswrapper[4863]: I1205 08:05:56.549367 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Dec 05 08:05:56 crc kubenswrapper[4863]: I1205 08:05:56.758943 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"970303aa-1c1f-4a60-9a92-a3d753caecef","Type":"ContainerStarted","Data":"afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760"} Dec 05 08:05:56 crc kubenswrapper[4863]: I1205 08:05:56.758986 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"970303aa-1c1f-4a60-9a92-a3d753caecef","Type":"ContainerStarted","Data":"bd1ae1f1255c2b004800fb316c92e33ab02cddbf5ba6b7229ea71688ca704226"} Dec 05 08:05:56 crc kubenswrapper[4863]: I1205 08:05:56.781515 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.781489529 podStartE2EDuration="2.781489529s" podCreationTimestamp="2025-12-05 08:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:05:56.775663018 +0000 UTC m=+4784.501660058" watchObservedRunningTime="2025-12-05 08:05:56.781489529 +0000 UTC m=+4784.507486589" Dec 05 08:05:59 crc kubenswrapper[4863]: I1205 08:05:59.627487 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Dec 05 08:05:59 crc kubenswrapper[4863]: I1205 08:05:59.630441 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:05:59 crc kubenswrapper[4863]: I1205 08:05:59.645340 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:05:59 crc kubenswrapper[4863]: I1205 08:05:59.729719 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr4bk\" (UniqueName: \"kubernetes.io/projected/534bdb68-3c71-4a5a-8746-7815ae7ccdd2-kube-api-access-tr4bk\") pod \"mariadb-client\" (UID: \"534bdb68-3c71-4a5a-8746-7815ae7ccdd2\") " pod="openstack/mariadb-client" Dec 05 08:05:59 crc kubenswrapper[4863]: I1205 08:05:59.831059 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr4bk\" (UniqueName: \"kubernetes.io/projected/534bdb68-3c71-4a5a-8746-7815ae7ccdd2-kube-api-access-tr4bk\") pod \"mariadb-client\" (UID: \"534bdb68-3c71-4a5a-8746-7815ae7ccdd2\") " pod="openstack/mariadb-client" Dec 05 08:05:59 crc kubenswrapper[4863]: I1205 08:05:59.850696 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr4bk\" (UniqueName: \"kubernetes.io/projected/534bdb68-3c71-4a5a-8746-7815ae7ccdd2-kube-api-access-tr4bk\") pod \"mariadb-client\" (UID: \"534bdb68-3c71-4a5a-8746-7815ae7ccdd2\") " pod="openstack/mariadb-client" Dec 05 08:05:59 crc kubenswrapper[4863]: I1205 08:05:59.960624 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:06:00 crc kubenswrapper[4863]: I1205 08:06:00.440895 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:00 crc kubenswrapper[4863]: W1205 08:06:00.448089 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod534bdb68_3c71_4a5a_8746_7815ae7ccdd2.slice/crio-3e5a01e450ac601856a7b8d779f5a04031cf694290781610d6580ed359f77156 WatchSource:0}: Error finding container 3e5a01e450ac601856a7b8d779f5a04031cf694290781610d6580ed359f77156: Status 404 returned error can't find the container with id 3e5a01e450ac601856a7b8d779f5a04031cf694290781610d6580ed359f77156 Dec 05 08:06:00 crc kubenswrapper[4863]: I1205 08:06:00.796984 4863 generic.go:334] "Generic (PLEG): container finished" podID="534bdb68-3c71-4a5a-8746-7815ae7ccdd2" containerID="9f853931f1045cd3ad2977a2051b87c076d91d06e7fde812598dbcc2e326d6da" exitCode=0 Dec 05 08:06:00 crc kubenswrapper[4863]: I1205 08:06:00.797050 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"534bdb68-3c71-4a5a-8746-7815ae7ccdd2","Type":"ContainerDied","Data":"9f853931f1045cd3ad2977a2051b87c076d91d06e7fde812598dbcc2e326d6da"} Dec 05 08:06:00 crc kubenswrapper[4863]: I1205 08:06:00.797388 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"534bdb68-3c71-4a5a-8746-7815ae7ccdd2","Type":"ContainerStarted","Data":"3e5a01e450ac601856a7b8d779f5a04031cf694290781610d6580ed359f77156"} Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.162422 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.181358 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_534bdb68-3c71-4a5a-8746-7815ae7ccdd2/mariadb-client/0.log" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.204084 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.210596 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.277990 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr4bk\" (UniqueName: \"kubernetes.io/projected/534bdb68-3c71-4a5a-8746-7815ae7ccdd2-kube-api-access-tr4bk\") pod \"534bdb68-3c71-4a5a-8746-7815ae7ccdd2\" (UID: \"534bdb68-3c71-4a5a-8746-7815ae7ccdd2\") " Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.286170 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/534bdb68-3c71-4a5a-8746-7815ae7ccdd2-kube-api-access-tr4bk" (OuterVolumeSpecName: "kube-api-access-tr4bk") pod "534bdb68-3c71-4a5a-8746-7815ae7ccdd2" (UID: "534bdb68-3c71-4a5a-8746-7815ae7ccdd2"). InnerVolumeSpecName "kube-api-access-tr4bk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.337616 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:02 crc kubenswrapper[4863]: E1205 08:06:02.338009 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="534bdb68-3c71-4a5a-8746-7815ae7ccdd2" containerName="mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.338028 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="534bdb68-3c71-4a5a-8746-7815ae7ccdd2" containerName="mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.338206 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="534bdb68-3c71-4a5a-8746-7815ae7ccdd2" containerName="mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.338800 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.344506 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.379540 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8nq5\" (UniqueName: \"kubernetes.io/projected/ad6abbb9-0dea-4990-98a0-74b913602578-kube-api-access-r8nq5\") pod \"mariadb-client\" (UID: \"ad6abbb9-0dea-4990-98a0-74b913602578\") " pod="openstack/mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.379944 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr4bk\" (UniqueName: \"kubernetes.io/projected/534bdb68-3c71-4a5a-8746-7815ae7ccdd2-kube-api-access-tr4bk\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.482777 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8nq5\" (UniqueName: \"kubernetes.io/projected/ad6abbb9-0dea-4990-98a0-74b913602578-kube-api-access-r8nq5\") pod \"mariadb-client\" (UID: \"ad6abbb9-0dea-4990-98a0-74b913602578\") " pod="openstack/mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.502752 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8nq5\" (UniqueName: \"kubernetes.io/projected/ad6abbb9-0dea-4990-98a0-74b913602578-kube-api-access-r8nq5\") pod \"mariadb-client\" (UID: \"ad6abbb9-0dea-4990-98a0-74b913602578\") " pod="openstack/mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.612124 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="534bdb68-3c71-4a5a-8746-7815ae7ccdd2" path="/var/lib/kubelet/pods/534bdb68-3c71-4a5a-8746-7815ae7ccdd2/volumes" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.656490 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.823034 4863 scope.go:117] "RemoveContainer" containerID="9f853931f1045cd3ad2977a2051b87c076d91d06e7fde812598dbcc2e326d6da" Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.823059 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:06:02 crc kubenswrapper[4863]: W1205 08:06:02.898659 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad6abbb9_0dea_4990_98a0_74b913602578.slice/crio-2c5f7845f6819d4afe5e8f6d8927e455e7f854ec93788f4726e3a6a85732549d WatchSource:0}: Error finding container 2c5f7845f6819d4afe5e8f6d8927e455e7f854ec93788f4726e3a6a85732549d: Status 404 returned error can't find the container with id 2c5f7845f6819d4afe5e8f6d8927e455e7f854ec93788f4726e3a6a85732549d Dec 05 08:06:02 crc kubenswrapper[4863]: I1205 08:06:02.898883 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:03 crc kubenswrapper[4863]: I1205 08:06:03.833390 4863 generic.go:334] "Generic (PLEG): container finished" podID="ad6abbb9-0dea-4990-98a0-74b913602578" containerID="c07380f196ce9cd217ff1823b552142d6f226f6510917ab8b535c9a03e16dcef" exitCode=0 Dec 05 08:06:03 crc kubenswrapper[4863]: I1205 08:06:03.833440 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"ad6abbb9-0dea-4990-98a0-74b913602578","Type":"ContainerDied","Data":"c07380f196ce9cd217ff1823b552142d6f226f6510917ab8b535c9a03e16dcef"} Dec 05 08:06:03 crc kubenswrapper[4863]: I1205 08:06:03.833516 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"ad6abbb9-0dea-4990-98a0-74b913602578","Type":"ContainerStarted","Data":"2c5f7845f6819d4afe5e8f6d8927e455e7f854ec93788f4726e3a6a85732549d"} Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.205418 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.226955 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_ad6abbb9-0dea-4990-98a0-74b913602578/mariadb-client/0.log" Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.251373 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.257085 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.323624 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8nq5\" (UniqueName: \"kubernetes.io/projected/ad6abbb9-0dea-4990-98a0-74b913602578-kube-api-access-r8nq5\") pod \"ad6abbb9-0dea-4990-98a0-74b913602578\" (UID: \"ad6abbb9-0dea-4990-98a0-74b913602578\") " Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.330092 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad6abbb9-0dea-4990-98a0-74b913602578-kube-api-access-r8nq5" (OuterVolumeSpecName: "kube-api-access-r8nq5") pod "ad6abbb9-0dea-4990-98a0-74b913602578" (UID: "ad6abbb9-0dea-4990-98a0-74b913602578"). InnerVolumeSpecName "kube-api-access-r8nq5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.425329 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8nq5\" (UniqueName: \"kubernetes.io/projected/ad6abbb9-0dea-4990-98a0-74b913602578-kube-api-access-r8nq5\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.866698 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c5f7845f6819d4afe5e8f6d8927e455e7f854ec93788f4726e3a6a85732549d" Dec 05 08:06:05 crc kubenswrapper[4863]: I1205 08:06:05.866798 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 05 08:06:06 crc kubenswrapper[4863]: I1205 08:06:06.613223 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad6abbb9-0dea-4990-98a0-74b913602578" path="/var/lib/kubelet/pods/ad6abbb9-0dea-4990-98a0-74b913602578/volumes" Dec 05 08:06:08 crc kubenswrapper[4863]: I1205 08:06:08.464126 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:06:08 crc kubenswrapper[4863]: I1205 08:06:08.464532 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.318166 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 08:06:34 crc kubenswrapper[4863]: E1205 08:06:34.319128 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6abbb9-0dea-4990-98a0-74b913602578" containerName="mariadb-client" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.319145 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6abbb9-0dea-4990-98a0-74b913602578" containerName="mariadb-client" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.319344 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad6abbb9-0dea-4990-98a0-74b913602578" containerName="mariadb-client" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.320385 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.324315 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.324876 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.325292 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-ghj98" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.337614 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.340769 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.350597 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.367149 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.370728 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.381268 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.388890 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496020 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496086 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496119 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496140 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3900cd64-7e39-475d-a10d-aacc603edd0e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496167 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496192 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c00709-2068-4e67-875b-63e1686cbac0-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496209 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwhp4\" (UniqueName: \"kubernetes.io/projected/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-kube-api-access-zwhp4\") pod \"ovsdbserver-nb-1\" (UID: 
\"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496233 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3900cd64-7e39-475d-a10d-aacc603edd0e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496250 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3900cd64-7e39-475d-a10d-aacc603edd0e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496291 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c3c00709-2068-4e67-875b-63e1686cbac0-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496312 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3c00709-2068-4e67-875b-63e1686cbac0-config\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496338 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fhzh\" (UniqueName: \"kubernetes.io/projected/3900cd64-7e39-475d-a10d-aacc603edd0e-kube-api-access-9fhzh\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496361 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3c00709-2068-4e67-875b-63e1686cbac0-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496529 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496561 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3900cd64-7e39-475d-a10d-aacc603edd0e-config\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496602 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88z4p\" (UniqueName: \"kubernetes.io/projected/c3c00709-2068-4e67-875b-63e1686cbac0-kube-api-access-88z4p\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 
08:06:34.496620 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-config\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.496645 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.557341 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.559822 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.562106 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.562299 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.563005 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-ddgqb" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.581831 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.595013 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.596346 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.597775 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.598081 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3900cd64-7e39-475d-a10d-aacc603edd0e-config\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.598335 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-config\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.598631 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88z4p\" (UniqueName: \"kubernetes.io/projected/c3c00709-2068-4e67-875b-63e1686cbac0-kube-api-access-88z4p\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.599151 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.599336 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.599499 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3900cd64-7e39-475d-a10d-aacc603edd0e-config\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.599168 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.599263 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-config\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.599830 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.600032 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.600289 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3900cd64-7e39-475d-a10d-aacc603edd0e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.600647 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.600827 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c00709-2068-4e67-875b-63e1686cbac0-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.600983 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwhp4\" (UniqueName: \"kubernetes.io/projected/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-kube-api-access-zwhp4\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.601205 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3900cd64-7e39-475d-a10d-aacc603edd0e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.601371 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3900cd64-7e39-475d-a10d-aacc603edd0e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.601976 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c3c00709-2068-4e67-875b-63e1686cbac0-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602171 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3c00709-2068-4e67-875b-63e1686cbac0-config\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602346 4863 
csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602389 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4d1646813910c91fa9c6639dee3675b1d868737e776deca5b8b9b354c968a6d1/globalmount\"" pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602354 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fhzh\" (UniqueName: \"kubernetes.io/projected/3900cd64-7e39-475d-a10d-aacc603edd0e-kube-api-access-9fhzh\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602527 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3c00709-2068-4e67-875b-63e1686cbac0-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602572 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602708 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b288a58a4efc5616d085407cbdd9fca9b55678bd538511403840bc6c32d4101c/globalmount\"" pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602346 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.602746 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0561e09b8239132a9517f7d1c286149b55434fa8b263e41ac3846b1f3b305a66/globalmount\"" pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.601439 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3900cd64-7e39-475d-a10d-aacc603edd0e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.603903 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3c00709-2068-4e67-875b-63e1686cbac0-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.604656 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c3c00709-2068-4e67-875b-63e1686cbac0-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.604977 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3900cd64-7e39-475d-a10d-aacc603edd0e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.605606 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3c00709-2068-4e67-875b-63e1686cbac0-config\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.607987 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.620657 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c00709-2068-4e67-875b-63e1686cbac0-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.621410 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3900cd64-7e39-475d-a10d-aacc603edd0e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.621991 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.624799 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88z4p\" (UniqueName: \"kubernetes.io/projected/c3c00709-2068-4e67-875b-63e1686cbac0-kube-api-access-88z4p\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.625790 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.627860 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.628657 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fhzh\" (UniqueName: \"kubernetes.io/projected/3900cd64-7e39-475d-a10d-aacc603edd0e-kube-api-access-9fhzh\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.632133 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.633335 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwhp4\" (UniqueName: \"kubernetes.io/projected/5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3-kube-api-access-zwhp4\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.639509 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.642360 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ceca55ab-a798-4aee-8d8c-cc690985bdda\") pod \"ovsdbserver-nb-0\" (UID: \"3900cd64-7e39-475d-a10d-aacc603edd0e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.645095 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-efe29aee-70e1-4dae-aad7-7251420bbab9\") pod \"ovsdbserver-nb-2\" (UID: \"c3c00709-2068-4e67-875b-63e1686cbac0\") " pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.666573 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fa301462-1a80-48a7-910c-d04c00adaf9d\") pod \"ovsdbserver-nb-1\" (UID: \"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3\") " pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.678773 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.700877 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.703729 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.703798 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-59c002ae-de10-4c2d-bd30-243630122943\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-59c002ae-de10-4c2d-bd30-243630122943\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.703873 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.703897 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-config\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.703924 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vlsz\" (UniqueName: \"kubernetes.io/projected/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-kube-api-access-5vlsz\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.703973 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.704007 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-config\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.704036 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.704083 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " 
pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.704122 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.704182 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.704234 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kjqb\" (UniqueName: \"kubernetes.io/projected/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-kube-api-access-9kjqb\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.805864 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806138 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806159 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806183 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kjqb\" (UniqueName: \"kubernetes.io/projected/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-kube-api-access-9kjqb\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806209 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806227 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-config\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806246 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-59c002ae-de10-4c2d-bd30-243630122943\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-59c002ae-de10-4c2d-bd30-243630122943\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806276 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d61af564-926f-45f7-a660-d9e297e49bc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d61af564-926f-45f7-a660-d9e297e49bc1\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806298 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806314 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scr79\" (UniqueName: \"kubernetes.io/projected/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-kube-api-access-scr79\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806330 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-config\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806350 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vlsz\" (UniqueName: \"kubernetes.io/projected/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-kube-api-access-5vlsz\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806376 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806403 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806426 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-config\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806446 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806487 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.806509 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.807782 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.807898 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.808634 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-config\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.808757 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.809627 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-config\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.811180 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.811212 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-59c002ae-de10-4c2d-bd30-243630122943\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-59c002ae-de10-4c2d-bd30-243630122943\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8e4823f07c58cb54add631de0f3eb8042e35bb9517d448ec8c0e95f1e1a8dc7b/globalmount\"" pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.811312 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.813839 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.813885 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b7cbf4d5ccafd7914744e20dbce1f776e0c53fd7a2d3fc1d2ba00bb685ba917e/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.816446 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.817375 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.825430 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kjqb\" (UniqueName: \"kubernetes.io/projected/a2103d64-1d3c-4b23-8d0e-f75fb68ac54c-kube-api-access-9kjqb\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.833071 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vlsz\" (UniqueName: \"kubernetes.io/projected/c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734-kube-api-access-5vlsz\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.856603 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-59c002ae-de10-4c2d-bd30-243630122943\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-59c002ae-de10-4c2d-bd30-243630122943\") pod \"ovsdbserver-sb-1\" (UID: \"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734\") " 
pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.857711 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8d3bb2a-e326-4d41-82f6-9745df376142\") pod \"ovsdbserver-sb-0\" (UID: \"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:34 crc kubenswrapper[4863]: I1205 08:06:34.894880 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.907760 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.907805 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.907851 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-config\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.907890 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d61af564-926f-45f7-a660-d9e297e49bc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d61af564-926f-45f7-a660-d9e297e49bc1\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.907919 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scr79\" (UniqueName: \"kubernetes.io/projected/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-kube-api-access-scr79\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.907969 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.909084 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.909608 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc 
kubenswrapper[4863]: I1205 08:06:34.911274 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-config\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.915020 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.922848 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.922878 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d61af564-926f-45f7-a660-d9e297e49bc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d61af564-926f-45f7-a660-d9e297e49bc1\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7f98912c116c61768213fbb0dc6b38efdea7aea721e9e33e2da5fd4f26c68630/globalmount\"" pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.928904 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scr79\" (UniqueName: \"kubernetes.io/projected/c0211a31-33ca-4ef2-a7ca-482e4506e8dc-kube-api-access-scr79\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.940702 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:34.951185 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d61af564-926f-45f7-a660-d9e297e49bc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d61af564-926f-45f7-a660-d9e297e49bc1\") pod \"ovsdbserver-sb-2\" (UID: \"c0211a31-33ca-4ef2-a7ca-482e4506e8dc\") " pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:35.083184 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:35.091142 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:35.189136 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:35.201299 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:35.285343 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 05 08:06:35 crc kubenswrapper[4863]: W1205 08:06:35.292722 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5213b0ea_3f9d_4df5_a3bd_2eb3330a6da3.slice/crio-bdf90f710782c60c035ced75393aaeaa35dad80dfb04f2ac3018e4048927f186 WatchSource:0}: Error finding container bdf90f710782c60c035ced75393aaeaa35dad80dfb04f2ac3018e4048927f186: Status 404 returned error can't find the container with id bdf90f710782c60c035ced75393aaeaa35dad80dfb04f2ac3018e4048927f186 Dec 05 08:06:35 crc kubenswrapper[4863]: I1205 08:06:35.928674 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 05 08:06:35 crc kubenswrapper[4863]: W1205 08:06:35.931629 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0211a31_33ca_4ef2_a7ca_482e4506e8dc.slice/crio-e35fea22b5fbf083fca0c7d4e6cf94361c82bc5b81a2c28460ab2189116bb92f WatchSource:0}: Error finding container e35fea22b5fbf083fca0c7d4e6cf94361c82bc5b81a2c28460ab2189116bb92f: Status 404 returned error can't find the container with id e35fea22b5fbf083fca0c7d4e6cf94361c82bc5b81a2c28460ab2189116bb92f Dec 05 08:06:36 crc kubenswrapper[4863]: I1205 08:06:36.028295 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 08:06:36 crc kubenswrapper[4863]: W1205 08:06:36.028770 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3900cd64_7e39_475d_a10d_aacc603edd0e.slice/crio-0998fcebad0a06d5456ba000a266ded9e79fddb399548edf8caf81c4544e36c9 WatchSource:0}: Error finding container 0998fcebad0a06d5456ba000a266ded9e79fddb399548edf8caf81c4544e36c9: Status 404 returned error can't find the container with id 0998fcebad0a06d5456ba000a266ded9e79fddb399548edf8caf81c4544e36c9 Dec 05 08:06:36 crc kubenswrapper[4863]: I1205 08:06:36.153052 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"c0211a31-33ca-4ef2-a7ca-482e4506e8dc","Type":"ContainerStarted","Data":"e35fea22b5fbf083fca0c7d4e6cf94361c82bc5b81a2c28460ab2189116bb92f"} Dec 05 08:06:36 crc kubenswrapper[4863]: I1205 08:06:36.155531 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3900cd64-7e39-475d-a10d-aacc603edd0e","Type":"ContainerStarted","Data":"0998fcebad0a06d5456ba000a266ded9e79fddb399548edf8caf81c4544e36c9"} Dec 05 08:06:36 crc kubenswrapper[4863]: I1205 08:06:36.157097 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3","Type":"ContainerStarted","Data":"bdf90f710782c60c035ced75393aaeaa35dad80dfb04f2ac3018e4048927f186"} Dec 05 08:06:36 crc kubenswrapper[4863]: I1205 08:06:36.158603 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" 
event={"ID":"c3c00709-2068-4e67-875b-63e1686cbac0","Type":"ContainerStarted","Data":"2a9278d9984fd7d08d6308353e409f327f156bd100f1582c5180fe7bf8e3b87f"} Dec 05 08:06:36 crc kubenswrapper[4863]: I1205 08:06:36.518642 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 08:06:36 crc kubenswrapper[4863]: I1205 08:06:36.933947 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 05 08:06:37 crc kubenswrapper[4863]: I1205 08:06:37.170676 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734","Type":"ContainerStarted","Data":"34bcd6b031f133c2ef3ce0bf6638562db6ddd77fae79e716f9967cede36c9639"} Dec 05 08:06:37 crc kubenswrapper[4863]: I1205 08:06:37.172624 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c","Type":"ContainerStarted","Data":"f8ffbf1811ad2853c2c9d3a93de516bd3a9e636ddeeec29a1a58caa22a9bd352"} Dec 05 08:06:38 crc kubenswrapper[4863]: I1205 08:06:38.463908 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:06:38 crc kubenswrapper[4863]: I1205 08:06:38.464252 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:06:38 crc kubenswrapper[4863]: I1205 08:06:38.464305 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:06:38 crc kubenswrapper[4863]: I1205 08:06:38.465187 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e2d70452a6134efae8c51c9d279a146e4f17d7c210af1b1b99dfa2feb9821817"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:06:38 crc kubenswrapper[4863]: I1205 08:06:38.465264 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://e2d70452a6134efae8c51c9d279a146e4f17d7c210af1b1b99dfa2feb9821817" gracePeriod=600 Dec 05 08:06:39 crc kubenswrapper[4863]: I1205 08:06:39.192603 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="e2d70452a6134efae8c51c9d279a146e4f17d7c210af1b1b99dfa2feb9821817" exitCode=0 Dec 05 08:06:39 crc kubenswrapper[4863]: I1205 08:06:39.193098 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"e2d70452a6134efae8c51c9d279a146e4f17d7c210af1b1b99dfa2feb9821817"} Dec 05 08:06:39 crc kubenswrapper[4863]: I1205 08:06:39.193134 4863 scope.go:117] "RemoveContainer" 
containerID="81fd5f963b4708ba2b21fd282de8a40d3aac08305f5a36ed0d692d6a5f80440d" Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.204919 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"c3c00709-2068-4e67-875b-63e1686cbac0","Type":"ContainerStarted","Data":"0980aad6f21bf17907ddc90ea154073895989e3871f4a44a6c79e7de77d018fc"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.205853 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"c3c00709-2068-4e67-875b-63e1686cbac0","Type":"ContainerStarted","Data":"b82bf22359f21c3d73c69dace484e54191463b425c759ffe2206be63aee5feb4"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.207265 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"c0211a31-33ca-4ef2-a7ca-482e4506e8dc","Type":"ContainerStarted","Data":"fea0d49f4f600392eb01db045c7c0cec52b77aae8ebc04018c0124f9eddf7222"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.209792 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3900cd64-7e39-475d-a10d-aacc603edd0e","Type":"ContainerStarted","Data":"fecfbebd3cdb9d54d32c777dd26ace754a1f3db3ee276320348179223694e864"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.209819 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3900cd64-7e39-475d-a10d-aacc603edd0e","Type":"ContainerStarted","Data":"4acd3dae66218f3a97941bbc58de755e27b3fcb4ef3cf8fcdb8a20406d7d6b5e"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.212342 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734","Type":"ContainerStarted","Data":"68dd3c0b3ed039a7c82a1d06e13d9cff341286da4a1ca63f8fde97d68a3be4a4"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.212370 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734","Type":"ContainerStarted","Data":"0fbe27688833955a1c3288ed49f9d7b5141647224779142fa961e1ea7b6f1b2d"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.215591 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.217436 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3","Type":"ContainerStarted","Data":"e7d5bf7f4d94bdfdd9e6952cab74e721950017fff0c39112ae7e2e463d35c90d"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.217462 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3","Type":"ContainerStarted","Data":"4aad60aa4893fb8c4637bd968afabf212877f1224bd0b13e978ca86ae73509eb"} Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.230649 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.215235866 podStartE2EDuration="7.23063103s" podCreationTimestamp="2025-12-05 08:06:33 +0000 UTC" firstStartedPulling="2025-12-05 08:06:35.200993766 +0000 UTC m=+4822.926990806" lastFinishedPulling="2025-12-05 
08:06:39.21638893 +0000 UTC m=+4826.942385970" observedRunningTime="2025-12-05 08:06:40.22612889 +0000 UTC m=+4827.952125970" watchObservedRunningTime="2025-12-05 08:06:40.23063103 +0000 UTC m=+4827.956628070" Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.267725 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.975313216 podStartE2EDuration="7.267708583s" podCreationTimestamp="2025-12-05 08:06:33 +0000 UTC" firstStartedPulling="2025-12-05 08:06:36.94500446 +0000 UTC m=+4824.671001500" lastFinishedPulling="2025-12-05 08:06:39.237399827 +0000 UTC m=+4826.963396867" observedRunningTime="2025-12-05 08:06:40.262393404 +0000 UTC m=+4827.988390454" watchObservedRunningTime="2025-12-05 08:06:40.267708583 +0000 UTC m=+4827.993705623" Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.293549 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.41918907 podStartE2EDuration="7.293529775s" podCreationTimestamp="2025-12-05 08:06:33 +0000 UTC" firstStartedPulling="2025-12-05 08:06:35.297193045 +0000 UTC m=+4823.023190095" lastFinishedPulling="2025-12-05 08:06:39.17153377 +0000 UTC m=+4826.897530800" observedRunningTime="2025-12-05 08:06:40.288360371 +0000 UTC m=+4828.014357411" watchObservedRunningTime="2025-12-05 08:06:40.293529775 +0000 UTC m=+4828.019526825" Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.320011 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.178684411 podStartE2EDuration="7.319987092s" podCreationTimestamp="2025-12-05 08:06:33 +0000 UTC" firstStartedPulling="2025-12-05 08:06:36.031848517 +0000 UTC m=+4823.757845557" lastFinishedPulling="2025-12-05 08:06:39.173151198 +0000 UTC m=+4826.899148238" observedRunningTime="2025-12-05 08:06:40.312138953 +0000 UTC m=+4828.038136033" watchObservedRunningTime="2025-12-05 08:06:40.319987092 +0000 UTC m=+4828.045984142" Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.679943 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.701140 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:40 crc kubenswrapper[4863]: I1205 08:06:40.941649 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:41 crc kubenswrapper[4863]: I1205 08:06:41.083755 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:41 crc kubenswrapper[4863]: I1205 08:06:41.229209 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"c0211a31-33ca-4ef2-a7ca-482e4506e8dc","Type":"ContainerStarted","Data":"106ebce7f9dc975572a7654343adef7f14d6a18f87f5dcde61ac263deb4ae4bf"} Dec 05 08:06:41 crc kubenswrapper[4863]: I1205 08:06:41.235230 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c","Type":"ContainerStarted","Data":"949e2ef41a10c5612b5af055b501fa0b0f5577d6a80cd37c6203c5ee66d5ef5c"} Dec 05 08:06:41 crc kubenswrapper[4863]: I1205 08:06:41.235292 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"a2103d64-1d3c-4b23-8d0e-f75fb68ac54c","Type":"ContainerStarted","Data":"4aad9013f687ced170116d017080ed4382f2680f181fd1d8ae9cf9270f5f02b6"} Dec 05 08:06:41 crc kubenswrapper[4863]: I1205 08:06:41.267369 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.966684317 podStartE2EDuration="8.267333529s" podCreationTimestamp="2025-12-05 08:06:33 +0000 UTC" firstStartedPulling="2025-12-05 08:06:35.933354203 +0000 UTC m=+4823.659351243" lastFinishedPulling="2025-12-05 08:06:39.234003415 +0000 UTC m=+4826.960000455" observedRunningTime="2025-12-05 08:06:41.253376903 +0000 UTC m=+4828.979374023" watchObservedRunningTime="2025-12-05 08:06:41.267333529 +0000 UTC m=+4828.993330599" Dec 05 08:06:41 crc kubenswrapper[4863]: I1205 08:06:41.289676 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.995929343 podStartE2EDuration="8.289636607s" podCreationTimestamp="2025-12-05 08:06:33 +0000 UTC" firstStartedPulling="2025-12-05 08:06:36.537300596 +0000 UTC m=+4824.263297636" lastFinishedPulling="2025-12-05 08:06:39.83100786 +0000 UTC m=+4827.557004900" observedRunningTime="2025-12-05 08:06:41.279160885 +0000 UTC m=+4829.005157925" watchObservedRunningTime="2025-12-05 08:06:41.289636607 +0000 UTC m=+4829.015633647" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.755224 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.755887 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.756201 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.756233 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.895997 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.930554 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.977589 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:43 crc kubenswrapper[4863]: I1205 08:06:43.977986 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.092738 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.137704 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.138155 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.156949 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.196819 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/ovsdbserver-sb-1" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.264151 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.264192 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.310984 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.313658 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.315345 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.321211 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.465665 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9cd857f7-nhf6l"] Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.466912 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.468592 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.482609 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9cd857f7-nhf6l"] Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.592668 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzf27\" (UniqueName: \"kubernetes.io/projected/30460fd9-b341-4b7c-9cef-8ed2d47988c1-kube-api-access-fzf27\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.592717 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-ovsdbserver-sb\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.592901 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-config\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.593006 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-dns-svc\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.674288 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9cd857f7-nhf6l"] Dec 05 08:06:44 crc kubenswrapper[4863]: E1205 08:06:44.674855 4863 
pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-fzf27 ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" podUID="30460fd9-b341-4b7c-9cef-8ed2d47988c1" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.694729 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-config\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.694804 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-dns-svc\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.694878 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzf27\" (UniqueName: \"kubernetes.io/projected/30460fd9-b341-4b7c-9cef-8ed2d47988c1-kube-api-access-fzf27\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.694897 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-ovsdbserver-sb\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.695645 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-config\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.700594 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-dns-svc\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.700658 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-ovsdbserver-sb\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.714987 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c687bc57-znhsk"] Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.716496 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.722602 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.735698 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c687bc57-znhsk"] Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.737600 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzf27\" (UniqueName: \"kubernetes.io/projected/30460fd9-b341-4b7c-9cef-8ed2d47988c1-kube-api-access-fzf27\") pod \"dnsmasq-dns-9cd857f7-nhf6l\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.795958 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7jmt\" (UniqueName: \"kubernetes.io/projected/b68180bd-6398-4a42-b0df-592d67c9fd17-kube-api-access-x7jmt\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.796455 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-dns-svc\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.796564 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-sb\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.796598 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-config\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.796712 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-nb\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.898302 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-nb\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.898391 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7jmt\" (UniqueName: \"kubernetes.io/projected/b68180bd-6398-4a42-b0df-592d67c9fd17-kube-api-access-x7jmt\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " 
pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.898417 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-dns-svc\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.898492 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-sb\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.898514 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-config\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.899522 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-sb\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.899541 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-nb\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.899521 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-dns-svc\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.899587 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-config\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:44 crc kubenswrapper[4863]: I1205 08:06:44.915442 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7jmt\" (UniqueName: \"kubernetes.io/projected/b68180bd-6398-4a42-b0df-592d67c9fd17-kube-api-access-x7jmt\") pod \"dnsmasq-dns-c687bc57-znhsk\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.085251 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.279995 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.291123 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.328161 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.412278 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-dns-svc\") pod \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.412541 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzf27\" (UniqueName: \"kubernetes.io/projected/30460fd9-b341-4b7c-9cef-8ed2d47988c1-kube-api-access-fzf27\") pod \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.412768 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-ovsdbserver-sb\") pod \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.413234 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-config\") pod \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\" (UID: \"30460fd9-b341-4b7c-9cef-8ed2d47988c1\") " Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.412895 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "30460fd9-b341-4b7c-9cef-8ed2d47988c1" (UID: "30460fd9-b341-4b7c-9cef-8ed2d47988c1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.413184 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "30460fd9-b341-4b7c-9cef-8ed2d47988c1" (UID: "30460fd9-b341-4b7c-9cef-8ed2d47988c1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.413658 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-config" (OuterVolumeSpecName: "config") pod "30460fd9-b341-4b7c-9cef-8ed2d47988c1" (UID: "30460fd9-b341-4b7c-9cef-8ed2d47988c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.420709 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30460fd9-b341-4b7c-9cef-8ed2d47988c1-kube-api-access-fzf27" (OuterVolumeSpecName: "kube-api-access-fzf27") pod "30460fd9-b341-4b7c-9cef-8ed2d47988c1" (UID: "30460fd9-b341-4b7c-9cef-8ed2d47988c1"). InnerVolumeSpecName "kube-api-access-fzf27". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.433478 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzf27\" (UniqueName: \"kubernetes.io/projected/30460fd9-b341-4b7c-9cef-8ed2d47988c1-kube-api-access-fzf27\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.433503 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.433513 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.433521 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30460fd9-b341-4b7c-9cef-8ed2d47988c1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:45 crc kubenswrapper[4863]: I1205 08:06:45.556993 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c687bc57-znhsk"] Dec 05 08:06:45 crc kubenswrapper[4863]: W1205 08:06:45.560115 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb68180bd_6398_4a42_b0df_592d67c9fd17.slice/crio-95d7e63d3529da20a26be37e63f88429470687a2b7bdc587272bb1f7b572f0fc WatchSource:0}: Error finding container 95d7e63d3529da20a26be37e63f88429470687a2b7bdc587272bb1f7b572f0fc: Status 404 returned error can't find the container with id 95d7e63d3529da20a26be37e63f88429470687a2b7bdc587272bb1f7b572f0fc Dec 05 08:06:46 crc kubenswrapper[4863]: I1205 08:06:46.288462 4863 generic.go:334] "Generic (PLEG): container finished" podID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerID="a308fb507492fa67c2a48068e47e322051eb8f7d1c95f279c2430505a5447faf" exitCode=0 Dec 05 08:06:46 crc kubenswrapper[4863]: I1205 08:06:46.292756 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9cd857f7-nhf6l" Dec 05 08:06:46 crc kubenswrapper[4863]: I1205 08:06:46.288707 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c687bc57-znhsk" event={"ID":"b68180bd-6398-4a42-b0df-592d67c9fd17","Type":"ContainerDied","Data":"a308fb507492fa67c2a48068e47e322051eb8f7d1c95f279c2430505a5447faf"} Dec 05 08:06:46 crc kubenswrapper[4863]: I1205 08:06:46.294806 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c687bc57-znhsk" event={"ID":"b68180bd-6398-4a42-b0df-592d67c9fd17","Type":"ContainerStarted","Data":"95d7e63d3529da20a26be37e63f88429470687a2b7bdc587272bb1f7b572f0fc"} Dec 05 08:06:46 crc kubenswrapper[4863]: I1205 08:06:46.364614 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9cd857f7-nhf6l"] Dec 05 08:06:46 crc kubenswrapper[4863]: I1205 08:06:46.370809 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9cd857f7-nhf6l"] Dec 05 08:06:46 crc kubenswrapper[4863]: I1205 08:06:46.611933 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30460fd9-b341-4b7c-9cef-8ed2d47988c1" path="/var/lib/kubelet/pods/30460fd9-b341-4b7c-9cef-8ed2d47988c1/volumes" Dec 05 08:06:47 crc kubenswrapper[4863]: I1205 08:06:47.300165 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c687bc57-znhsk" event={"ID":"b68180bd-6398-4a42-b0df-592d67c9fd17","Type":"ContainerStarted","Data":"c3a647833955e88a0a26d1e3961303dd3a6da6668582ba1eb6426faa362556d9"} Dec 05 08:06:47 crc kubenswrapper[4863]: I1205 08:06:47.300442 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:47 crc kubenswrapper[4863]: I1205 08:06:47.317557 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c687bc57-znhsk" podStartSLOduration=3.3175412140000002 podStartE2EDuration="3.317541214s" podCreationTimestamp="2025-12-05 08:06:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:06:47.316516339 +0000 UTC m=+4835.042513389" watchObservedRunningTime="2025-12-05 08:06:47.317541214 +0000 UTC m=+4835.043538254" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.107656 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.109191 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.111653 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.121741 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.186986 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zprxh\" (UniqueName: \"kubernetes.io/projected/4e62a179-0048-43a4-943b-6f43455f44d1-kube-api-access-zprxh\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.187086 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4e62a179-0048-43a4-943b-6f43455f44d1-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.187117 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.288426 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zprxh\" (UniqueName: \"kubernetes.io/projected/4e62a179-0048-43a4-943b-6f43455f44d1-kube-api-access-zprxh\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.288563 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4e62a179-0048-43a4-943b-6f43455f44d1-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.288597 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.293022 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.293057 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bcc0d0491943036698b39095d23f48b2bfc8e7cd7bc3a69cb9856bab4d2ef90f/globalmount\"" pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.297085 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4e62a179-0048-43a4-943b-6f43455f44d1-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.309250 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zprxh\" (UniqueName: \"kubernetes.io/projected/4e62a179-0048-43a4-943b-6f43455f44d1-kube-api-access-zprxh\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.336969 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\") pod \"ovn-copy-data\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " pod="openstack/ovn-copy-data" Dec 05 08:06:48 crc kubenswrapper[4863]: I1205 08:06:48.443917 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Dec 05 08:06:49 crc kubenswrapper[4863]: I1205 08:06:49.025040 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Dec 05 08:06:49 crc kubenswrapper[4863]: I1205 08:06:49.320017 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"4e62a179-0048-43a4-943b-6f43455f44d1","Type":"ContainerStarted","Data":"80b5717d5dc1da4eaf8ab2a702231a159a093469d22fa5197ef6e04de3352aa2"} Dec 05 08:06:50 crc kubenswrapper[4863]: I1205 08:06:50.329596 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"4e62a179-0048-43a4-943b-6f43455f44d1","Type":"ContainerStarted","Data":"382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642"} Dec 05 08:06:50 crc kubenswrapper[4863]: I1205 08:06:50.358273 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.163441858 podStartE2EDuration="3.358248272s" podCreationTimestamp="2025-12-05 08:06:47 +0000 UTC" firstStartedPulling="2025-12-05 08:06:49.030937639 +0000 UTC m=+4836.756934689" lastFinishedPulling="2025-12-05 08:06:49.225744043 +0000 UTC m=+4836.951741103" observedRunningTime="2025-12-05 08:06:50.349457061 +0000 UTC m=+4838.075454101" watchObservedRunningTime="2025-12-05 08:06:50.358248272 +0000 UTC m=+4838.084245322" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.086636 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.187975 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-778d75ccf7-9njb9"] Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.188385 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" podUID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerName="dnsmasq-dns" containerID="cri-o://8d8fba718452c18c9f09275f4c23cbf14a01f1bf8d69e72e59f04fdc5ab25d2a" gracePeriod=10 Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.393681 4863 generic.go:334] "Generic (PLEG): container finished" podID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerID="8d8fba718452c18c9f09275f4c23cbf14a01f1bf8d69e72e59f04fdc5ab25d2a" exitCode=0 Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.393765 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" event={"ID":"d49ebb46-e0a0-403f-814a-04bd6408dc24","Type":"ContainerDied","Data":"8d8fba718452c18c9f09275f4c23cbf14a01f1bf8d69e72e59f04fdc5ab25d2a"} Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.654457 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.814133 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-config\") pod \"d49ebb46-e0a0-403f-814a-04bd6408dc24\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.814250 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twzqg\" (UniqueName: \"kubernetes.io/projected/d49ebb46-e0a0-403f-814a-04bd6408dc24-kube-api-access-twzqg\") pod \"d49ebb46-e0a0-403f-814a-04bd6408dc24\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.814326 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-dns-svc\") pod \"d49ebb46-e0a0-403f-814a-04bd6408dc24\" (UID: \"d49ebb46-e0a0-403f-814a-04bd6408dc24\") " Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.821514 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d49ebb46-e0a0-403f-814a-04bd6408dc24-kube-api-access-twzqg" (OuterVolumeSpecName: "kube-api-access-twzqg") pod "d49ebb46-e0a0-403f-814a-04bd6408dc24" (UID: "d49ebb46-e0a0-403f-814a-04bd6408dc24"). InnerVolumeSpecName "kube-api-access-twzqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.871811 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d49ebb46-e0a0-403f-814a-04bd6408dc24" (UID: "d49ebb46-e0a0-403f-814a-04bd6408dc24"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.877973 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-config" (OuterVolumeSpecName: "config") pod "d49ebb46-e0a0-403f-814a-04bd6408dc24" (UID: "d49ebb46-e0a0-403f-814a-04bd6408dc24"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.917194 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.917241 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twzqg\" (UniqueName: \"kubernetes.io/projected/d49ebb46-e0a0-403f-814a-04bd6408dc24-kube-api-access-twzqg\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.917258 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49ebb46-e0a0-403f-814a-04bd6408dc24-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.959654 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 08:06:55 crc kubenswrapper[4863]: E1205 08:06:55.960001 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerName="dnsmasq-dns" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.960014 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerName="dnsmasq-dns" Dec 05 08:06:55 crc kubenswrapper[4863]: E1205 08:06:55.960047 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerName="init" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.960054 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerName="init" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.960214 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d49ebb46-e0a0-403f-814a-04bd6408dc24" containerName="dnsmasq-dns" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.961020 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.965087 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-52brx" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.965428 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.965769 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 08:06:55 crc kubenswrapper[4863]: I1205 08:06:55.972389 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.121044 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0abe8166-fcb3-43c9-894f-36def72e5dde-scripts\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.121108 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjhcp\" (UniqueName: \"kubernetes.io/projected/0abe8166-fcb3-43c9-894f-36def72e5dde-kube-api-access-qjhcp\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.121436 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0abe8166-fcb3-43c9-894f-36def72e5dde-config\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.122163 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abe8166-fcb3-43c9-894f-36def72e5dde-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.122277 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0abe8166-fcb3-43c9-894f-36def72e5dde-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.223340 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0abe8166-fcb3-43c9-894f-36def72e5dde-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.223408 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0abe8166-fcb3-43c9-894f-36def72e5dde-scripts\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.223443 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjhcp\" (UniqueName: \"kubernetes.io/projected/0abe8166-fcb3-43c9-894f-36def72e5dde-kube-api-access-qjhcp\") pod 
\"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.223536 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0abe8166-fcb3-43c9-894f-36def72e5dde-config\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.223591 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abe8166-fcb3-43c9-894f-36def72e5dde-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.224130 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0abe8166-fcb3-43c9-894f-36def72e5dde-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.224990 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0abe8166-fcb3-43c9-894f-36def72e5dde-config\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.225216 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0abe8166-fcb3-43c9-894f-36def72e5dde-scripts\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.229733 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abe8166-fcb3-43c9-894f-36def72e5dde-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.244185 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjhcp\" (UniqueName: \"kubernetes.io/projected/0abe8166-fcb3-43c9-894f-36def72e5dde-kube-api-access-qjhcp\") pod \"ovn-northd-0\" (UID: \"0abe8166-fcb3-43c9-894f-36def72e5dde\") " pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.280645 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.405084 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" event={"ID":"d49ebb46-e0a0-403f-814a-04bd6408dc24","Type":"ContainerDied","Data":"a4d4a2750a14817df44a6dd095a48551819ab7e384e7b4832c6ac0cea12412c8"} Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.405354 4863 scope.go:117] "RemoveContainer" containerID="8d8fba718452c18c9f09275f4c23cbf14a01f1bf8d69e72e59f04fdc5ab25d2a" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.405535 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-778d75ccf7-9njb9" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.431216 4863 scope.go:117] "RemoveContainer" containerID="fc82a5b7499b9c6ce777b8dcdb76a11ef4681c510dfb8feea0ac84a404f237e8" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.448039 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-778d75ccf7-9njb9"] Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.475730 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-778d75ccf7-9njb9"] Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.615140 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d49ebb46-e0a0-403f-814a-04bd6408dc24" path="/var/lib/kubelet/pods/d49ebb46-e0a0-403f-814a-04bd6408dc24/volumes" Dec 05 08:06:56 crc kubenswrapper[4863]: I1205 08:06:56.774816 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 08:06:57 crc kubenswrapper[4863]: I1205 08:06:57.413169 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0abe8166-fcb3-43c9-894f-36def72e5dde","Type":"ContainerStarted","Data":"fc982386081eb22acab349b1ebf8a374c3ae215f41da438b822f2a6abeff81f5"} Dec 05 08:06:58 crc kubenswrapper[4863]: I1205 08:06:58.428123 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0abe8166-fcb3-43c9-894f-36def72e5dde","Type":"ContainerStarted","Data":"0d7f7310a0035025496e283ae680c3e848ce1b4b7637af05855b3680db06f3ec"} Dec 05 08:06:58 crc kubenswrapper[4863]: I1205 08:06:58.428515 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0abe8166-fcb3-43c9-894f-36def72e5dde","Type":"ContainerStarted","Data":"8c2c46f526c8159ab3bde4a073b95ca5b76916a98888eb516f6118a025450ba6"} Dec 05 08:06:58 crc kubenswrapper[4863]: I1205 08:06:58.428660 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 08:06:58 crc kubenswrapper[4863]: I1205 08:06:58.464151 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.650766831 podStartE2EDuration="3.46411535s" podCreationTimestamp="2025-12-05 08:06:55 +0000 UTC" firstStartedPulling="2025-12-05 08:06:56.79205953 +0000 UTC m=+4844.518056580" lastFinishedPulling="2025-12-05 08:06:57.605408059 +0000 UTC m=+4845.331405099" observedRunningTime="2025-12-05 08:06:58.45458715 +0000 UTC m=+4846.180584270" watchObservedRunningTime="2025-12-05 08:06:58.46411535 +0000 UTC m=+4846.190112420" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.318668 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-jzf6h"] Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.320692 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.332431 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-jzf6h"] Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.419341 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-24f8-account-create-update-w4k26"] Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.420609 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.426041 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-24f8-account-create-update-w4k26"] Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.428394 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.515865 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6159e7ab-c333-4262-a31f-6da90b2cb002-operator-scripts\") pod \"keystone-db-create-jzf6h\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.516013 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r28h\" (UniqueName: \"kubernetes.io/projected/6159e7ab-c333-4262-a31f-6da90b2cb002-kube-api-access-4r28h\") pod \"keystone-db-create-jzf6h\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.618450 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj855\" (UniqueName: \"kubernetes.io/projected/4aae0b60-50d8-497f-b764-1fa0999191be-kube-api-access-qj855\") pod \"keystone-24f8-account-create-update-w4k26\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.618782 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4aae0b60-50d8-497f-b764-1fa0999191be-operator-scripts\") pod \"keystone-24f8-account-create-update-w4k26\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.618991 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6159e7ab-c333-4262-a31f-6da90b2cb002-operator-scripts\") pod \"keystone-db-create-jzf6h\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.619117 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r28h\" (UniqueName: \"kubernetes.io/projected/6159e7ab-c333-4262-a31f-6da90b2cb002-kube-api-access-4r28h\") pod \"keystone-db-create-jzf6h\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.619828 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6159e7ab-c333-4262-a31f-6da90b2cb002-operator-scripts\") pod \"keystone-db-create-jzf6h\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.640185 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r28h\" (UniqueName: \"kubernetes.io/projected/6159e7ab-c333-4262-a31f-6da90b2cb002-kube-api-access-4r28h\") pod 
\"keystone-db-create-jzf6h\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.679545 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.721153 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4aae0b60-50d8-497f-b764-1fa0999191be-operator-scripts\") pod \"keystone-24f8-account-create-update-w4k26\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.721381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj855\" (UniqueName: \"kubernetes.io/projected/4aae0b60-50d8-497f-b764-1fa0999191be-kube-api-access-qj855\") pod \"keystone-24f8-account-create-update-w4k26\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.725161 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4aae0b60-50d8-497f-b764-1fa0999191be-operator-scripts\") pod \"keystone-24f8-account-create-update-w4k26\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:01 crc kubenswrapper[4863]: I1205 08:07:01.746401 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj855\" (UniqueName: \"kubernetes.io/projected/4aae0b60-50d8-497f-b764-1fa0999191be-kube-api-access-qj855\") pod \"keystone-24f8-account-create-update-w4k26\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:02 crc kubenswrapper[4863]: I1205 08:07:02.035781 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:02 crc kubenswrapper[4863]: I1205 08:07:02.157186 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-jzf6h"] Dec 05 08:07:02 crc kubenswrapper[4863]: W1205 08:07:02.385256 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6159e7ab_c333_4262_a31f_6da90b2cb002.slice/crio-ac4e812bb1ad92d02d55a6eab62399db0a1c0317901165455bdea8107c493797 WatchSource:0}: Error finding container ac4e812bb1ad92d02d55a6eab62399db0a1c0317901165455bdea8107c493797: Status 404 returned error can't find the container with id ac4e812bb1ad92d02d55a6eab62399db0a1c0317901165455bdea8107c493797 Dec 05 08:07:02 crc kubenswrapper[4863]: I1205 08:07:02.483127 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jzf6h" event={"ID":"6159e7ab-c333-4262-a31f-6da90b2cb002","Type":"ContainerStarted","Data":"ac4e812bb1ad92d02d55a6eab62399db0a1c0317901165455bdea8107c493797"} Dec 05 08:07:02 crc kubenswrapper[4863]: I1205 08:07:02.871576 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-24f8-account-create-update-w4k26"] Dec 05 08:07:02 crc kubenswrapper[4863]: W1205 08:07:02.879640 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4aae0b60_50d8_497f_b764_1fa0999191be.slice/crio-fe584e1cfa88a10ee3a02d22ad3f5b2fe7654848e16c7df03e945af08c070fcd WatchSource:0}: Error finding container fe584e1cfa88a10ee3a02d22ad3f5b2fe7654848e16c7df03e945af08c070fcd: Status 404 returned error can't find the container with id fe584e1cfa88a10ee3a02d22ad3f5b2fe7654848e16c7df03e945af08c070fcd Dec 05 08:07:03 crc kubenswrapper[4863]: I1205 08:07:03.499868 4863 generic.go:334] "Generic (PLEG): container finished" podID="6159e7ab-c333-4262-a31f-6da90b2cb002" containerID="e2405a0988ea44908f1b0509bdf24e66d8657ee21cdfbd05e7449a3a55fa5397" exitCode=0 Dec 05 08:07:03 crc kubenswrapper[4863]: I1205 08:07:03.499946 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jzf6h" event={"ID":"6159e7ab-c333-4262-a31f-6da90b2cb002","Type":"ContainerDied","Data":"e2405a0988ea44908f1b0509bdf24e66d8657ee21cdfbd05e7449a3a55fa5397"} Dec 05 08:07:03 crc kubenswrapper[4863]: I1205 08:07:03.504077 4863 generic.go:334] "Generic (PLEG): container finished" podID="4aae0b60-50d8-497f-b764-1fa0999191be" containerID="4c19ec8091607f0440ea3d6420243a43ed8f539e9e142f57a14988ddaeaed738" exitCode=0 Dec 05 08:07:03 crc kubenswrapper[4863]: I1205 08:07:03.504151 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-24f8-account-create-update-w4k26" event={"ID":"4aae0b60-50d8-497f-b764-1fa0999191be","Type":"ContainerDied","Data":"4c19ec8091607f0440ea3d6420243a43ed8f539e9e142f57a14988ddaeaed738"} Dec 05 08:07:03 crc kubenswrapper[4863]: I1205 08:07:03.504196 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-24f8-account-create-update-w4k26" event={"ID":"4aae0b60-50d8-497f-b764-1fa0999191be","Type":"ContainerStarted","Data":"fe584e1cfa88a10ee3a02d22ad3f5b2fe7654848e16c7df03e945af08c070fcd"} Dec 05 08:07:04 crc kubenswrapper[4863]: I1205 08:07:04.975038 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:04 crc kubenswrapper[4863]: I1205 08:07:04.980417 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.082979 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r28h\" (UniqueName: \"kubernetes.io/projected/6159e7ab-c333-4262-a31f-6da90b2cb002-kube-api-access-4r28h\") pod \"6159e7ab-c333-4262-a31f-6da90b2cb002\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.083077 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qj855\" (UniqueName: \"kubernetes.io/projected/4aae0b60-50d8-497f-b764-1fa0999191be-kube-api-access-qj855\") pod \"4aae0b60-50d8-497f-b764-1fa0999191be\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.083108 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6159e7ab-c333-4262-a31f-6da90b2cb002-operator-scripts\") pod \"6159e7ab-c333-4262-a31f-6da90b2cb002\" (UID: \"6159e7ab-c333-4262-a31f-6da90b2cb002\") " Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.083131 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4aae0b60-50d8-497f-b764-1fa0999191be-operator-scripts\") pod \"4aae0b60-50d8-497f-b764-1fa0999191be\" (UID: \"4aae0b60-50d8-497f-b764-1fa0999191be\") " Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.084009 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4aae0b60-50d8-497f-b764-1fa0999191be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4aae0b60-50d8-497f-b764-1fa0999191be" (UID: "4aae0b60-50d8-497f-b764-1fa0999191be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.084009 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6159e7ab-c333-4262-a31f-6da90b2cb002-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6159e7ab-c333-4262-a31f-6da90b2cb002" (UID: "6159e7ab-c333-4262-a31f-6da90b2cb002"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.091689 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aae0b60-50d8-497f-b764-1fa0999191be-kube-api-access-qj855" (OuterVolumeSpecName: "kube-api-access-qj855") pod "4aae0b60-50d8-497f-b764-1fa0999191be" (UID: "4aae0b60-50d8-497f-b764-1fa0999191be"). InnerVolumeSpecName "kube-api-access-qj855". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.091809 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6159e7ab-c333-4262-a31f-6da90b2cb002-kube-api-access-4r28h" (OuterVolumeSpecName: "kube-api-access-4r28h") pod "6159e7ab-c333-4262-a31f-6da90b2cb002" (UID: "6159e7ab-c333-4262-a31f-6da90b2cb002"). InnerVolumeSpecName "kube-api-access-4r28h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.184716 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r28h\" (UniqueName: \"kubernetes.io/projected/6159e7ab-c333-4262-a31f-6da90b2cb002-kube-api-access-4r28h\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.184750 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qj855\" (UniqueName: \"kubernetes.io/projected/4aae0b60-50d8-497f-b764-1fa0999191be-kube-api-access-qj855\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.184760 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4aae0b60-50d8-497f-b764-1fa0999191be-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.184769 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6159e7ab-c333-4262-a31f-6da90b2cb002-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.525357 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jzf6h" event={"ID":"6159e7ab-c333-4262-a31f-6da90b2cb002","Type":"ContainerDied","Data":"ac4e812bb1ad92d02d55a6eab62399db0a1c0317901165455bdea8107c493797"} Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.525625 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac4e812bb1ad92d02d55a6eab62399db0a1c0317901165455bdea8107c493797" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.525382 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jzf6h" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.528101 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-24f8-account-create-update-w4k26" event={"ID":"4aae0b60-50d8-497f-b764-1fa0999191be","Type":"ContainerDied","Data":"fe584e1cfa88a10ee3a02d22ad3f5b2fe7654848e16c7df03e945af08c070fcd"} Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.528167 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe584e1cfa88a10ee3a02d22ad3f5b2fe7654848e16c7df03e945af08c070fcd" Dec 05 08:07:05 crc kubenswrapper[4863]: I1205 08:07:05.528220 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-24f8-account-create-update-w4k26" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.865162 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-fltbs"] Dec 05 08:07:06 crc kubenswrapper[4863]: E1205 08:07:06.865491 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aae0b60-50d8-497f-b764-1fa0999191be" containerName="mariadb-account-create-update" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.865503 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4aae0b60-50d8-497f-b764-1fa0999191be" containerName="mariadb-account-create-update" Dec 05 08:07:06 crc kubenswrapper[4863]: E1205 08:07:06.865531 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6159e7ab-c333-4262-a31f-6da90b2cb002" containerName="mariadb-database-create" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.865539 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6159e7ab-c333-4262-a31f-6da90b2cb002" containerName="mariadb-database-create" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.865676 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6159e7ab-c333-4262-a31f-6da90b2cb002" containerName="mariadb-database-create" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.865697 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4aae0b60-50d8-497f-b764-1fa0999191be" containerName="mariadb-account-create-update" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.866250 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.868409 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.875963 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.876009 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.878577 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-j9c4j" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.882773 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fltbs"] Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.916598 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-combined-ca-bundle\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.916705 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmm8l\" (UniqueName: \"kubernetes.io/projected/eded1eaa-ee68-40a5-86ef-aa6c45567e01-kube-api-access-dmm8l\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:06 crc kubenswrapper[4863]: I1205 08:07:06.916729 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-config-data\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.018628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmm8l\" (UniqueName: \"kubernetes.io/projected/eded1eaa-ee68-40a5-86ef-aa6c45567e01-kube-api-access-dmm8l\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.018708 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-config-data\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.018785 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-combined-ca-bundle\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.024314 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-combined-ca-bundle\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.024783 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-config-data\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.039112 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmm8l\" (UniqueName: \"kubernetes.io/projected/eded1eaa-ee68-40a5-86ef-aa6c45567e01-kube-api-access-dmm8l\") pod \"keystone-db-sync-fltbs\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.182589 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:07 crc kubenswrapper[4863]: I1205 08:07:07.713648 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fltbs"] Dec 05 08:07:08 crc kubenswrapper[4863]: I1205 08:07:08.553159 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fltbs" event={"ID":"eded1eaa-ee68-40a5-86ef-aa6c45567e01","Type":"ContainerStarted","Data":"a6345b6eed738f6edbc37f437e6e0a9958f79e08b5c4e2bf2a64940eeedd9552"} Dec 05 08:07:11 crc kubenswrapper[4863]: I1205 08:07:11.352515 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 08:07:13 crc kubenswrapper[4863]: I1205 08:07:13.599899 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fltbs" event={"ID":"eded1eaa-ee68-40a5-86ef-aa6c45567e01","Type":"ContainerStarted","Data":"1df9e5fc2a73d1c4cd83a9aae526fbcd6a091007698645149945fdfb19bc1d93"} Dec 05 08:07:13 crc kubenswrapper[4863]: I1205 08:07:13.639064 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-fltbs" podStartSLOduration=2.127729391 podStartE2EDuration="7.63902747s" podCreationTimestamp="2025-12-05 08:07:06 +0000 UTC" firstStartedPulling="2025-12-05 08:07:07.735192243 +0000 UTC m=+4855.461189323" lastFinishedPulling="2025-12-05 08:07:13.246490352 +0000 UTC m=+4860.972487402" observedRunningTime="2025-12-05 08:07:13.623752522 +0000 UTC m=+4861.349749642" watchObservedRunningTime="2025-12-05 08:07:13.63902747 +0000 UTC m=+4861.365024580" Dec 05 08:07:15 crc kubenswrapper[4863]: I1205 08:07:15.627623 4863 generic.go:334] "Generic (PLEG): container finished" podID="eded1eaa-ee68-40a5-86ef-aa6c45567e01" containerID="1df9e5fc2a73d1c4cd83a9aae526fbcd6a091007698645149945fdfb19bc1d93" exitCode=0 Dec 05 08:07:15 crc kubenswrapper[4863]: I1205 08:07:15.627737 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fltbs" event={"ID":"eded1eaa-ee68-40a5-86ef-aa6c45567e01","Type":"ContainerDied","Data":"1df9e5fc2a73d1c4cd83a9aae526fbcd6a091007698645149945fdfb19bc1d93"} Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.061364 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.241177 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmm8l\" (UniqueName: \"kubernetes.io/projected/eded1eaa-ee68-40a5-86ef-aa6c45567e01-kube-api-access-dmm8l\") pod \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.241519 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-combined-ca-bundle\") pod \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.241605 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-config-data\") pod \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\" (UID: \"eded1eaa-ee68-40a5-86ef-aa6c45567e01\") " Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.246913 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eded1eaa-ee68-40a5-86ef-aa6c45567e01-kube-api-access-dmm8l" (OuterVolumeSpecName: "kube-api-access-dmm8l") pod "eded1eaa-ee68-40a5-86ef-aa6c45567e01" (UID: "eded1eaa-ee68-40a5-86ef-aa6c45567e01"). InnerVolumeSpecName "kube-api-access-dmm8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.263824 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eded1eaa-ee68-40a5-86ef-aa6c45567e01" (UID: "eded1eaa-ee68-40a5-86ef-aa6c45567e01"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.299753 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-config-data" (OuterVolumeSpecName: "config-data") pod "eded1eaa-ee68-40a5-86ef-aa6c45567e01" (UID: "eded1eaa-ee68-40a5-86ef-aa6c45567e01"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.344154 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.344210 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmm8l\" (UniqueName: \"kubernetes.io/projected/eded1eaa-ee68-40a5-86ef-aa6c45567e01-kube-api-access-dmm8l\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.344231 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eded1eaa-ee68-40a5-86ef-aa6c45567e01-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.660191 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fltbs" event={"ID":"eded1eaa-ee68-40a5-86ef-aa6c45567e01","Type":"ContainerDied","Data":"a6345b6eed738f6edbc37f437e6e0a9958f79e08b5c4e2bf2a64940eeedd9552"} Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.660253 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6345b6eed738f6edbc37f437e6e0a9958f79e08b5c4e2bf2a64940eeedd9552" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.660333 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fltbs" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.915428 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bc5ffbc59-lfsqm"] Dec 05 08:07:17 crc kubenswrapper[4863]: E1205 08:07:17.915769 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eded1eaa-ee68-40a5-86ef-aa6c45567e01" containerName="keystone-db-sync" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.915781 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="eded1eaa-ee68-40a5-86ef-aa6c45567e01" containerName="keystone-db-sync" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.915992 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="eded1eaa-ee68-40a5-86ef-aa6c45567e01" containerName="keystone-db-sync" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.916995 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.944161 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bc5ffbc59-lfsqm"] Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.957945 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.958035 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbhtb\" (UniqueName: \"kubernetes.io/projected/c940cc43-04e2-425c-81c7-227a2d03e3c9-kube-api-access-sbhtb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.958103 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-dns-svc\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.958123 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.958175 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-config\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.962927 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-htp5j"] Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.964021 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.969046 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.969267 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.969410 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.969871 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.969999 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-j9c4j" Dec 05 08:07:17 crc kubenswrapper[4863]: I1205 08:07:17.975523 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-htp5j"] Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060006 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-config\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060058 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-fernet-keys\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060091 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-scripts\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060126 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060159 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-config-data\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060211 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-credential-keys\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060262 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbhtb\" (UniqueName: 
\"kubernetes.io/projected/c940cc43-04e2-425c-81c7-227a2d03e3c9-kube-api-access-sbhtb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060325 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-combined-ca-bundle\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060424 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-dns-svc\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060501 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.060582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn5vc\" (UniqueName: \"kubernetes.io/projected/fe1b8a85-6014-4d48-ac40-beeaa1130690-kube-api-access-qn5vc\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.061061 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-config\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.061279 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-dns-svc\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.061546 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.061870 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.080162 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbhtb\" (UniqueName: 
\"kubernetes.io/projected/c940cc43-04e2-425c-81c7-227a2d03e3c9-kube-api-access-sbhtb\") pod \"dnsmasq-dns-5bc5ffbc59-lfsqm\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.162708 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn5vc\" (UniqueName: \"kubernetes.io/projected/fe1b8a85-6014-4d48-ac40-beeaa1130690-kube-api-access-qn5vc\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.163041 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-fernet-keys\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.163072 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-scripts\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.163107 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-config-data\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.163157 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-credential-keys\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.163200 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-combined-ca-bundle\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.167358 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-credential-keys\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.167732 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-combined-ca-bundle\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.167947 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-scripts\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " 
pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.168607 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-fernet-keys\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.170853 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-config-data\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.181033 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn5vc\" (UniqueName: \"kubernetes.io/projected/fe1b8a85-6014-4d48-ac40-beeaa1130690-kube-api-access-qn5vc\") pod \"keystone-bootstrap-htp5j\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.241837 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.278407 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.758842 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bc5ffbc59-lfsqm"] Dec 05 08:07:18 crc kubenswrapper[4863]: W1205 08:07:18.761068 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc940cc43_04e2_425c_81c7_227a2d03e3c9.slice/crio-993d1c5d5f3097c9702d7ef1a6556a73d6dfda5586b01f60bf15e30bbf4bbc87 WatchSource:0}: Error finding container 993d1c5d5f3097c9702d7ef1a6556a73d6dfda5586b01f60bf15e30bbf4bbc87: Status 404 returned error can't find the container with id 993d1c5d5f3097c9702d7ef1a6556a73d6dfda5586b01f60bf15e30bbf4bbc87 Dec 05 08:07:18 crc kubenswrapper[4863]: I1205 08:07:18.793730 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-htp5j"] Dec 05 08:07:18 crc kubenswrapper[4863]: W1205 08:07:18.817695 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe1b8a85_6014_4d48_ac40_beeaa1130690.slice/crio-e81c9ac4f68e11c2452ef4e6f7d7f01cd9d75b399e0389ab56060502b1245098 WatchSource:0}: Error finding container e81c9ac4f68e11c2452ef4e6f7d7f01cd9d75b399e0389ab56060502b1245098: Status 404 returned error can't find the container with id e81c9ac4f68e11c2452ef4e6f7d7f01cd9d75b399e0389ab56060502b1245098 Dec 05 08:07:19 crc kubenswrapper[4863]: I1205 08:07:19.681218 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-htp5j" event={"ID":"fe1b8a85-6014-4d48-ac40-beeaa1130690","Type":"ContainerStarted","Data":"dc2f8df4d8c17393318e5e4ec239fc45d00895a000edf10aad536c08a1ce8858"} Dec 05 08:07:19 crc kubenswrapper[4863]: I1205 08:07:19.681608 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-htp5j" event={"ID":"fe1b8a85-6014-4d48-ac40-beeaa1130690","Type":"ContainerStarted","Data":"e81c9ac4f68e11c2452ef4e6f7d7f01cd9d75b399e0389ab56060502b1245098"} Dec 05 
08:07:19 crc kubenswrapper[4863]: I1205 08:07:19.693163 4863 generic.go:334] "Generic (PLEG): container finished" podID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerID="e31c0d44f68e5e1e6de338c4f9801ae135811aec616601b2086d40bdc509ccb2" exitCode=0 Dec 05 08:07:19 crc kubenswrapper[4863]: I1205 08:07:19.693245 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" event={"ID":"c940cc43-04e2-425c-81c7-227a2d03e3c9","Type":"ContainerDied","Data":"e31c0d44f68e5e1e6de338c4f9801ae135811aec616601b2086d40bdc509ccb2"} Dec 05 08:07:19 crc kubenswrapper[4863]: I1205 08:07:19.693279 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" event={"ID":"c940cc43-04e2-425c-81c7-227a2d03e3c9","Type":"ContainerStarted","Data":"993d1c5d5f3097c9702d7ef1a6556a73d6dfda5586b01f60bf15e30bbf4bbc87"} Dec 05 08:07:19 crc kubenswrapper[4863]: I1205 08:07:19.742951 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-htp5j" podStartSLOduration=2.742932038 podStartE2EDuration="2.742932038s" podCreationTimestamp="2025-12-05 08:07:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:07:19.715852596 +0000 UTC m=+4867.441849676" watchObservedRunningTime="2025-12-05 08:07:19.742932038 +0000 UTC m=+4867.468929088" Dec 05 08:07:20 crc kubenswrapper[4863]: I1205 08:07:20.705395 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" event={"ID":"c940cc43-04e2-425c-81c7-227a2d03e3c9","Type":"ContainerStarted","Data":"03c96562b9da42e367a8119ce67dd18a5190fe5225832ed9334354d975171554"} Dec 05 08:07:20 crc kubenswrapper[4863]: I1205 08:07:20.705689 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:20 crc kubenswrapper[4863]: I1205 08:07:20.738920 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" podStartSLOduration=3.738901417 podStartE2EDuration="3.738901417s" podCreationTimestamp="2025-12-05 08:07:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:07:20.734182423 +0000 UTC m=+4868.460179533" watchObservedRunningTime="2025-12-05 08:07:20.738901417 +0000 UTC m=+4868.464898457" Dec 05 08:07:22 crc kubenswrapper[4863]: I1205 08:07:22.723928 4863 generic.go:334] "Generic (PLEG): container finished" podID="fe1b8a85-6014-4d48-ac40-beeaa1130690" containerID="dc2f8df4d8c17393318e5e4ec239fc45d00895a000edf10aad536c08a1ce8858" exitCode=0 Dec 05 08:07:22 crc kubenswrapper[4863]: I1205 08:07:22.724001 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-htp5j" event={"ID":"fe1b8a85-6014-4d48-ac40-beeaa1130690","Type":"ContainerDied","Data":"dc2f8df4d8c17393318e5e4ec239fc45d00895a000edf10aad536c08a1ce8858"} Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.107835 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.273664 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn5vc\" (UniqueName: \"kubernetes.io/projected/fe1b8a85-6014-4d48-ac40-beeaa1130690-kube-api-access-qn5vc\") pod \"fe1b8a85-6014-4d48-ac40-beeaa1130690\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.273805 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-fernet-keys\") pod \"fe1b8a85-6014-4d48-ac40-beeaa1130690\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.273864 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-credential-keys\") pod \"fe1b8a85-6014-4d48-ac40-beeaa1130690\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.273912 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-combined-ca-bundle\") pod \"fe1b8a85-6014-4d48-ac40-beeaa1130690\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.273971 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-config-data\") pod \"fe1b8a85-6014-4d48-ac40-beeaa1130690\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.273991 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-scripts\") pod \"fe1b8a85-6014-4d48-ac40-beeaa1130690\" (UID: \"fe1b8a85-6014-4d48-ac40-beeaa1130690\") " Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.280706 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fe1b8a85-6014-4d48-ac40-beeaa1130690" (UID: "fe1b8a85-6014-4d48-ac40-beeaa1130690"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.280730 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe1b8a85-6014-4d48-ac40-beeaa1130690-kube-api-access-qn5vc" (OuterVolumeSpecName: "kube-api-access-qn5vc") pod "fe1b8a85-6014-4d48-ac40-beeaa1130690" (UID: "fe1b8a85-6014-4d48-ac40-beeaa1130690"). InnerVolumeSpecName "kube-api-access-qn5vc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.280944 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-scripts" (OuterVolumeSpecName: "scripts") pod "fe1b8a85-6014-4d48-ac40-beeaa1130690" (UID: "fe1b8a85-6014-4d48-ac40-beeaa1130690"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.281501 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fe1b8a85-6014-4d48-ac40-beeaa1130690" (UID: "fe1b8a85-6014-4d48-ac40-beeaa1130690"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.299360 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe1b8a85-6014-4d48-ac40-beeaa1130690" (UID: "fe1b8a85-6014-4d48-ac40-beeaa1130690"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.302647 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-config-data" (OuterVolumeSpecName: "config-data") pod "fe1b8a85-6014-4d48-ac40-beeaa1130690" (UID: "fe1b8a85-6014-4d48-ac40-beeaa1130690"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.376336 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.376383 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.376397 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn5vc\" (UniqueName: \"kubernetes.io/projected/fe1b8a85-6014-4d48-ac40-beeaa1130690-kube-api-access-qn5vc\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.376411 4863 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.376422 4863 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.376435 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe1b8a85-6014-4d48-ac40-beeaa1130690-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.742002 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-htp5j" event={"ID":"fe1b8a85-6014-4d48-ac40-beeaa1130690","Type":"ContainerDied","Data":"e81c9ac4f68e11c2452ef4e6f7d7f01cd9d75b399e0389ab56060502b1245098"} Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.742305 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e81c9ac4f68e11c2452ef4e6f7d7f01cd9d75b399e0389ab56060502b1245098" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.742083 4863 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-htp5j" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.821138 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-htp5j"] Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.827156 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-htp5j"] Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.915832 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-zlpld"] Dec 05 08:07:24 crc kubenswrapper[4863]: E1205 08:07:24.917402 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1b8a85-6014-4d48-ac40-beeaa1130690" containerName="keystone-bootstrap" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.917599 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1b8a85-6014-4d48-ac40-beeaa1130690" containerName="keystone-bootstrap" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.918095 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe1b8a85-6014-4d48-ac40-beeaa1130690" containerName="keystone-bootstrap" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.919281 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.930607 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zlpld"] Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.965341 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.965618 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.965758 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.965915 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-j9c4j" Dec 05 08:07:24 crc kubenswrapper[4863]: I1205 08:07:24.966409 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.087294 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-fernet-keys\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.087354 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-scripts\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.087375 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-combined-ca-bundle\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc 
kubenswrapper[4863]: I1205 08:07:25.087607 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-config-data\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.087641 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-credential-keys\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.087816 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wfbd\" (UniqueName: \"kubernetes.io/projected/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-kube-api-access-8wfbd\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.189221 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-fernet-keys\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.189322 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-scripts\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.189355 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-combined-ca-bundle\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.189421 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-config-data\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.189451 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-credential-keys\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.189708 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wfbd\" (UniqueName: \"kubernetes.io/projected/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-kube-api-access-8wfbd\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.198106 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-config-data\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.198223 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-combined-ca-bundle\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.199777 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-scripts\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.200182 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-fernet-keys\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.202727 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-credential-keys\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.225119 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wfbd\" (UniqueName: \"kubernetes.io/projected/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-kube-api-access-8wfbd\") pod \"keystone-bootstrap-zlpld\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.284382 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.719695 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zlpld"] Dec 05 08:07:25 crc kubenswrapper[4863]: W1205 08:07:25.731077 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7a59583_e60d_4de4_88c7_7b50a9cb00c4.slice/crio-06a6427f432316e6b246e544ce54a6d5bf1737d6659bbfc0de07f7e67b5eaad7 WatchSource:0}: Error finding container 06a6427f432316e6b246e544ce54a6d5bf1737d6659bbfc0de07f7e67b5eaad7: Status 404 returned error can't find the container with id 06a6427f432316e6b246e544ce54a6d5bf1737d6659bbfc0de07f7e67b5eaad7 Dec 05 08:07:25 crc kubenswrapper[4863]: I1205 08:07:25.755585 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zlpld" event={"ID":"c7a59583-e60d-4de4-88c7-7b50a9cb00c4","Type":"ContainerStarted","Data":"06a6427f432316e6b246e544ce54a6d5bf1737d6659bbfc0de07f7e67b5eaad7"} Dec 05 08:07:26 crc kubenswrapper[4863]: I1205 08:07:26.615576 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe1b8a85-6014-4d48-ac40-beeaa1130690" path="/var/lib/kubelet/pods/fe1b8a85-6014-4d48-ac40-beeaa1130690/volumes" Dec 05 08:07:26 crc kubenswrapper[4863]: I1205 08:07:26.770609 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zlpld" event={"ID":"c7a59583-e60d-4de4-88c7-7b50a9cb00c4","Type":"ContainerStarted","Data":"d5f76b78768df4699267611050f6c0e5d0b52488b062eff013f3bf98bb6e0266"} Dec 05 08:07:26 crc kubenswrapper[4863]: I1205 08:07:26.798908 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-zlpld" podStartSLOduration=2.798880236 podStartE2EDuration="2.798880236s" podCreationTimestamp="2025-12-05 08:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:07:26.79236864 +0000 UTC m=+4874.518365750" watchObservedRunningTime="2025-12-05 08:07:26.798880236 +0000 UTC m=+4874.524877316" Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.243618 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.335958 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c687bc57-znhsk"] Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.336557 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-c687bc57-znhsk" podUID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerName="dnsmasq-dns" containerID="cri-o://c3a647833955e88a0a26d1e3961303dd3a6da6668582ba1eb6426faa362556d9" gracePeriod=10 Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.789531 4863 generic.go:334] "Generic (PLEG): container finished" podID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerID="c3a647833955e88a0a26d1e3961303dd3a6da6668582ba1eb6426faa362556d9" exitCode=0 Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.789582 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c687bc57-znhsk" event={"ID":"b68180bd-6398-4a42-b0df-592d67c9fd17","Type":"ContainerDied","Data":"c3a647833955e88a0a26d1e3961303dd3a6da6668582ba1eb6426faa362556d9"} Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.885372 4863 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.955679 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-config\") pod \"b68180bd-6398-4a42-b0df-592d67c9fd17\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.955879 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7jmt\" (UniqueName: \"kubernetes.io/projected/b68180bd-6398-4a42-b0df-592d67c9fd17-kube-api-access-x7jmt\") pod \"b68180bd-6398-4a42-b0df-592d67c9fd17\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.955917 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-sb\") pod \"b68180bd-6398-4a42-b0df-592d67c9fd17\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.955941 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-nb\") pod \"b68180bd-6398-4a42-b0df-592d67c9fd17\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.955996 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-dns-svc\") pod \"b68180bd-6398-4a42-b0df-592d67c9fd17\" (UID: \"b68180bd-6398-4a42-b0df-592d67c9fd17\") " Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.961568 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b68180bd-6398-4a42-b0df-592d67c9fd17-kube-api-access-x7jmt" (OuterVolumeSpecName: "kube-api-access-x7jmt") pod "b68180bd-6398-4a42-b0df-592d67c9fd17" (UID: "b68180bd-6398-4a42-b0df-592d67c9fd17"). InnerVolumeSpecName "kube-api-access-x7jmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.995732 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-config" (OuterVolumeSpecName: "config") pod "b68180bd-6398-4a42-b0df-592d67c9fd17" (UID: "b68180bd-6398-4a42-b0df-592d67c9fd17"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:07:28 crc kubenswrapper[4863]: I1205 08:07:28.998145 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b68180bd-6398-4a42-b0df-592d67c9fd17" (UID: "b68180bd-6398-4a42-b0df-592d67c9fd17"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.001933 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b68180bd-6398-4a42-b0df-592d67c9fd17" (UID: "b68180bd-6398-4a42-b0df-592d67c9fd17"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.007121 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b68180bd-6398-4a42-b0df-592d67c9fd17" (UID: "b68180bd-6398-4a42-b0df-592d67c9fd17"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.057392 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7jmt\" (UniqueName: \"kubernetes.io/projected/b68180bd-6398-4a42-b0df-592d67c9fd17-kube-api-access-x7jmt\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.057545 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.057620 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.057716 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.057771 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68180bd-6398-4a42-b0df-592d67c9fd17-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.804157 4863 generic.go:334] "Generic (PLEG): container finished" podID="c7a59583-e60d-4de4-88c7-7b50a9cb00c4" containerID="d5f76b78768df4699267611050f6c0e5d0b52488b062eff013f3bf98bb6e0266" exitCode=0 Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.804248 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zlpld" event={"ID":"c7a59583-e60d-4de4-88c7-7b50a9cb00c4","Type":"ContainerDied","Data":"d5f76b78768df4699267611050f6c0e5d0b52488b062eff013f3bf98bb6e0266"} Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.808158 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c687bc57-znhsk" event={"ID":"b68180bd-6398-4a42-b0df-592d67c9fd17","Type":"ContainerDied","Data":"95d7e63d3529da20a26be37e63f88429470687a2b7bdc587272bb1f7b572f0fc"} Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.808207 4863 scope.go:117] "RemoveContainer" containerID="c3a647833955e88a0a26d1e3961303dd3a6da6668582ba1eb6426faa362556d9" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.808292 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c687bc57-znhsk" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.838730 4863 scope.go:117] "RemoveContainer" containerID="a308fb507492fa67c2a48068e47e322051eb8f7d1c95f279c2430505a5447faf" Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.860507 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c687bc57-znhsk"] Dec 05 08:07:29 crc kubenswrapper[4863]: I1205 08:07:29.869489 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c687bc57-znhsk"] Dec 05 08:07:30 crc kubenswrapper[4863]: I1205 08:07:30.621442 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b68180bd-6398-4a42-b0df-592d67c9fd17" path="/var/lib/kubelet/pods/b68180bd-6398-4a42-b0df-592d67c9fd17/volumes" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.256245 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.294000 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-fernet-keys\") pod \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.294051 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-config-data\") pod \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.294095 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-combined-ca-bundle\") pod \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.294136 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wfbd\" (UniqueName: \"kubernetes.io/projected/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-kube-api-access-8wfbd\") pod \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.294169 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-scripts\") pod \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.294208 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-credential-keys\") pod \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\" (UID: \"c7a59583-e60d-4de4-88c7-7b50a9cb00c4\") " Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.301138 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c7a59583-e60d-4de4-88c7-7b50a9cb00c4" (UID: "c7a59583-e60d-4de4-88c7-7b50a9cb00c4"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.305708 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-scripts" (OuterVolumeSpecName: "scripts") pod "c7a59583-e60d-4de4-88c7-7b50a9cb00c4" (UID: "c7a59583-e60d-4de4-88c7-7b50a9cb00c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.306302 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-kube-api-access-8wfbd" (OuterVolumeSpecName: "kube-api-access-8wfbd") pod "c7a59583-e60d-4de4-88c7-7b50a9cb00c4" (UID: "c7a59583-e60d-4de4-88c7-7b50a9cb00c4"). InnerVolumeSpecName "kube-api-access-8wfbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.307880 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c7a59583-e60d-4de4-88c7-7b50a9cb00c4" (UID: "c7a59583-e60d-4de4-88c7-7b50a9cb00c4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.340920 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-config-data" (OuterVolumeSpecName: "config-data") pod "c7a59583-e60d-4de4-88c7-7b50a9cb00c4" (UID: "c7a59583-e60d-4de4-88c7-7b50a9cb00c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.345672 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7a59583-e60d-4de4-88c7-7b50a9cb00c4" (UID: "c7a59583-e60d-4de4-88c7-7b50a9cb00c4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.397516 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.397995 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wfbd\" (UniqueName: \"kubernetes.io/projected/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-kube-api-access-8wfbd\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.398074 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.398221 4863 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.398291 4863 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.398367 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59583-e60d-4de4-88c7-7b50a9cb00c4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.830949 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zlpld" event={"ID":"c7a59583-e60d-4de4-88c7-7b50a9cb00c4","Type":"ContainerDied","Data":"06a6427f432316e6b246e544ce54a6d5bf1737d6659bbfc0de07f7e67b5eaad7"} Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.830999 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06a6427f432316e6b246e544ce54a6d5bf1737d6659bbfc0de07f7e67b5eaad7" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.831027 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zlpld" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.928933 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7cbdfccb-4r9s8"] Dec 05 08:07:31 crc kubenswrapper[4863]: E1205 08:07:31.929403 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerName="dnsmasq-dns" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.929431 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerName="dnsmasq-dns" Dec 05 08:07:31 crc kubenswrapper[4863]: E1205 08:07:31.929454 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerName="init" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.929463 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerName="init" Dec 05 08:07:31 crc kubenswrapper[4863]: E1205 08:07:31.929584 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a59583-e60d-4de4-88c7-7b50a9cb00c4" containerName="keystone-bootstrap" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.929594 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a59583-e60d-4de4-88c7-7b50a9cb00c4" containerName="keystone-bootstrap" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.929786 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b68180bd-6398-4a42-b0df-592d67c9fd17" containerName="dnsmasq-dns" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.929804 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7a59583-e60d-4de4-88c7-7b50a9cb00c4" containerName="keystone-bootstrap" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.930543 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.932968 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-j9c4j" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.933117 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.933442 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.933631 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 08:07:31 crc kubenswrapper[4863]: I1205 08:07:31.942097 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7cbdfccb-4r9s8"] Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.109191 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-credential-keys\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.109257 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-fernet-keys\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.109330 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7wkw\" (UniqueName: \"kubernetes.io/projected/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-kube-api-access-f7wkw\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.109375 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-scripts\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.109434 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-config-data\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.109504 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-combined-ca-bundle\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.210581 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-combined-ca-bundle\") pod \"keystone-7cbdfccb-4r9s8\" (UID: 
\"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.210673 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-credential-keys\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.210700 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-fernet-keys\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.210780 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7wkw\" (UniqueName: \"kubernetes.io/projected/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-kube-api-access-f7wkw\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.210821 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-scripts\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.210888 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-config-data\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.215485 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-credential-keys\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.215643 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-fernet-keys\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.215868 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-config-data\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.216217 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-combined-ca-bundle\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.216814 4863 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-scripts\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.232052 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7wkw\" (UniqueName: \"kubernetes.io/projected/e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a-kube-api-access-f7wkw\") pod \"keystone-7cbdfccb-4r9s8\" (UID: \"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a\") " pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.266351 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.820578 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7cbdfccb-4r9s8"] Dec 05 08:07:32 crc kubenswrapper[4863]: I1205 08:07:32.842685 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7cbdfccb-4r9s8" event={"ID":"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a","Type":"ContainerStarted","Data":"8e6ea33115b047bf86d2598cfa54b5a657e01bb2ced07e88181af7356b2fd81b"} Dec 05 08:07:33 crc kubenswrapper[4863]: I1205 08:07:33.858349 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7cbdfccb-4r9s8" event={"ID":"e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a","Type":"ContainerStarted","Data":"6be0a6006d0927647284860a4bcff5c994b9662f261d2d92d74f826e08d79adf"} Dec 05 08:07:33 crc kubenswrapper[4863]: I1205 08:07:33.905360 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7cbdfccb-4r9s8" podStartSLOduration=2.905331522 podStartE2EDuration="2.905331522s" podCreationTimestamp="2025-12-05 08:07:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:07:33.892119113 +0000 UTC m=+4881.618116213" watchObservedRunningTime="2025-12-05 08:07:33.905331522 +0000 UTC m=+4881.631328602" Dec 05 08:07:34 crc kubenswrapper[4863]: I1205 08:07:34.865018 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:08:03 crc kubenswrapper[4863]: I1205 08:08:03.742536 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7cbdfccb-4r9s8" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.413298 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.415571 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.417331 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.417827 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.418130 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-q2sx6" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.422890 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.475988 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.476047 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config-secret\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.476215 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk6b4\" (UniqueName: \"kubernetes.io/projected/a134c4bd-77c4-40ca-9c30-565c1edf00ab-kube-api-access-lk6b4\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.577425 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config-secret\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.577851 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk6b4\" (UniqueName: \"kubernetes.io/projected/a134c4bd-77c4-40ca-9c30-565c1edf00ab-kube-api-access-lk6b4\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.577958 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.578893 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.584264 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config-secret\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.605638 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk6b4\" (UniqueName: \"kubernetes.io/projected/a134c4bd-77c4-40ca-9c30-565c1edf00ab-kube-api-access-lk6b4\") pod \"openstackclient\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " pod="openstack/openstackclient" Dec 05 08:08:06 crc kubenswrapper[4863]: I1205 08:08:06.783594 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 08:08:07 crc kubenswrapper[4863]: I1205 08:08:07.860134 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 08:08:07 crc kubenswrapper[4863]: W1205 08:08:07.864366 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda134c4bd_77c4_40ca_9c30_565c1edf00ab.slice/crio-ac376e11f0bfed87e8b20f3a6a07790f5d700e3f9cccda75a2ec767f069fbecc WatchSource:0}: Error finding container ac376e11f0bfed87e8b20f3a6a07790f5d700e3f9cccda75a2ec767f069fbecc: Status 404 returned error can't find the container with id ac376e11f0bfed87e8b20f3a6a07790f5d700e3f9cccda75a2ec767f069fbecc Dec 05 08:08:08 crc kubenswrapper[4863]: I1205 08:08:08.201124 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a134c4bd-77c4-40ca-9c30-565c1edf00ab","Type":"ContainerStarted","Data":"ac376e11f0bfed87e8b20f3a6a07790f5d700e3f9cccda75a2ec767f069fbecc"} Dec 05 08:08:19 crc kubenswrapper[4863]: I1205 08:08:19.297703 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a134c4bd-77c4-40ca-9c30-565c1edf00ab","Type":"ContainerStarted","Data":"2262a2baba028670732e6e118f56344acc0a997c7db2c3f63a75be8ecf507486"} Dec 05 08:08:19 crc kubenswrapper[4863]: I1205 08:08:19.318417 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.707787016 podStartE2EDuration="13.318396386s" podCreationTimestamp="2025-12-05 08:08:06 +0000 UTC" firstStartedPulling="2025-12-05 08:08:07.866783821 +0000 UTC m=+4915.592780861" lastFinishedPulling="2025-12-05 08:08:18.477393161 +0000 UTC m=+4926.203390231" observedRunningTime="2025-12-05 08:08:19.316836758 +0000 UTC m=+4927.042833848" watchObservedRunningTime="2025-12-05 08:08:19.318396386 +0000 UTC m=+4927.044393426" Dec 05 08:08:40 crc kubenswrapper[4863]: E1205 08:08:40.456700 4863 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.106:48550->38.102.83.106:33381: write tcp 38.102.83.106:48550->38.102.83.106:33381: write: broken pipe Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.860010 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhhv"] Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.863198 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.867121 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhhv"] Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.879743 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm7xl\" (UniqueName: \"kubernetes.io/projected/7edec090-2944-4acd-becd-101521bae782-kube-api-access-pm7xl\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.879844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-utilities\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.879939 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-catalog-content\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.995488 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm7xl\" (UniqueName: \"kubernetes.io/projected/7edec090-2944-4acd-becd-101521bae782-kube-api-access-pm7xl\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.995575 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-utilities\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.995643 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-catalog-content\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.996136 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-utilities\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:42 crc kubenswrapper[4863]: I1205 08:08:42.996205 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-catalog-content\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.018529 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-pm7xl\" (UniqueName: \"kubernetes.io/projected/7edec090-2944-4acd-becd-101521bae782-kube-api-access-pm7xl\") pod \"redhat-marketplace-qwhhv\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.072232 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-259fq"] Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.083307 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.088424 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-259fq"] Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.192220 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.202652 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chr6r\" (UniqueName: \"kubernetes.io/projected/58dd3ffa-7eea-4093-a034-4182ba532bf4-kube-api-access-chr6r\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.202725 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-utilities\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.202810 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-catalog-content\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.308161 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chr6r\" (UniqueName: \"kubernetes.io/projected/58dd3ffa-7eea-4093-a034-4182ba532bf4-kube-api-access-chr6r\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.308410 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-utilities\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.308487 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-catalog-content\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.309063 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-catalog-content\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.309058 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-utilities\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.326647 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chr6r\" (UniqueName: \"kubernetes.io/projected/58dd3ffa-7eea-4093-a034-4182ba532bf4-kube-api-access-chr6r\") pod \"redhat-operators-259fq\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.409971 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.503464 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhhv"] Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.560173 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhhv" event={"ID":"7edec090-2944-4acd-becd-101521bae782","Type":"ContainerStarted","Data":"51be2b20d6cbfe0d295abf1d9d9d19d9118ff9ebdd56e45a1712c1d853a59069"} Dec 05 08:08:43 crc kubenswrapper[4863]: I1205 08:08:43.853999 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-259fq"] Dec 05 08:08:43 crc kubenswrapper[4863]: W1205 08:08:43.987603 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58dd3ffa_7eea_4093_a034_4182ba532bf4.slice/crio-81bd4235a5ac4cce510307a07214f8753f318148f78477238586f39bb7d30763 WatchSource:0}: Error finding container 81bd4235a5ac4cce510307a07214f8753f318148f78477238586f39bb7d30763: Status 404 returned error can't find the container with id 81bd4235a5ac4cce510307a07214f8753f318148f78477238586f39bb7d30763 Dec 05 08:08:44 crc kubenswrapper[4863]: I1205 08:08:44.568178 4863 generic.go:334] "Generic (PLEG): container finished" podID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerID="a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030" exitCode=0 Dec 05 08:08:44 crc kubenswrapper[4863]: I1205 08:08:44.568381 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-259fq" event={"ID":"58dd3ffa-7eea-4093-a034-4182ba532bf4","Type":"ContainerDied","Data":"a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030"} Dec 05 08:08:44 crc kubenswrapper[4863]: I1205 08:08:44.568602 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-259fq" event={"ID":"58dd3ffa-7eea-4093-a034-4182ba532bf4","Type":"ContainerStarted","Data":"81bd4235a5ac4cce510307a07214f8753f318148f78477238586f39bb7d30763"} Dec 05 08:08:44 crc kubenswrapper[4863]: I1205 08:08:44.570898 4863 generic.go:334] "Generic (PLEG): container finished" podID="7edec090-2944-4acd-becd-101521bae782" containerID="2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31" 
exitCode=0 Dec 05 08:08:44 crc kubenswrapper[4863]: I1205 08:08:44.570934 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhhv" event={"ID":"7edec090-2944-4acd-becd-101521bae782","Type":"ContainerDied","Data":"2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31"} Dec 05 08:08:45 crc kubenswrapper[4863]: I1205 08:08:45.580582 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhhv" event={"ID":"7edec090-2944-4acd-becd-101521bae782","Type":"ContainerStarted","Data":"43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689"} Dec 05 08:08:45 crc kubenswrapper[4863]: I1205 08:08:45.583214 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-259fq" event={"ID":"58dd3ffa-7eea-4093-a034-4182ba532bf4","Type":"ContainerStarted","Data":"6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c"} Dec 05 08:08:46 crc kubenswrapper[4863]: I1205 08:08:46.593594 4863 generic.go:334] "Generic (PLEG): container finished" podID="7edec090-2944-4acd-becd-101521bae782" containerID="43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689" exitCode=0 Dec 05 08:08:46 crc kubenswrapper[4863]: I1205 08:08:46.593738 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhhv" event={"ID":"7edec090-2944-4acd-becd-101521bae782","Type":"ContainerDied","Data":"43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689"} Dec 05 08:08:46 crc kubenswrapper[4863]: I1205 08:08:46.601900 4863 generic.go:334] "Generic (PLEG): container finished" podID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerID="6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c" exitCode=0 Dec 05 08:08:46 crc kubenswrapper[4863]: I1205 08:08:46.621959 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-259fq" event={"ID":"58dd3ffa-7eea-4093-a034-4182ba532bf4","Type":"ContainerDied","Data":"6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c"} Dec 05 08:08:47 crc kubenswrapper[4863]: I1205 08:08:47.610290 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-259fq" event={"ID":"58dd3ffa-7eea-4093-a034-4182ba532bf4","Type":"ContainerStarted","Data":"4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c"} Dec 05 08:08:47 crc kubenswrapper[4863]: I1205 08:08:47.612322 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhhv" event={"ID":"7edec090-2944-4acd-becd-101521bae782","Type":"ContainerStarted","Data":"9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc"} Dec 05 08:08:47 crc kubenswrapper[4863]: I1205 08:08:47.631757 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-259fq" podStartSLOduration=2.163114534 podStartE2EDuration="4.631737918s" podCreationTimestamp="2025-12-05 08:08:43 +0000 UTC" firstStartedPulling="2025-12-05 08:08:44.57029053 +0000 UTC m=+4952.296287570" lastFinishedPulling="2025-12-05 08:08:47.038913914 +0000 UTC m=+4954.764910954" observedRunningTime="2025-12-05 08:08:47.628359507 +0000 UTC m=+4955.354356547" watchObservedRunningTime="2025-12-05 08:08:47.631737918 +0000 UTC m=+4955.357734958" Dec 05 08:08:47 crc kubenswrapper[4863]: I1205 08:08:47.655860 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-qwhhv" podStartSLOduration=3.226312958 podStartE2EDuration="5.655837629s" podCreationTimestamp="2025-12-05 08:08:42 +0000 UTC" firstStartedPulling="2025-12-05 08:08:44.572343889 +0000 UTC m=+4952.298340929" lastFinishedPulling="2025-12-05 08:08:47.00186856 +0000 UTC m=+4954.727865600" observedRunningTime="2025-12-05 08:08:47.653294768 +0000 UTC m=+4955.379291868" watchObservedRunningTime="2025-12-05 08:08:47.655837629 +0000 UTC m=+4955.381834669" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.193683 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.194300 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.242407 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.411614 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.411685 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.459978 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.754343 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:53 crc kubenswrapper[4863]: I1205 08:08:53.763342 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:55 crc kubenswrapper[4863]: I1205 08:08:55.082904 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-259fq"] Dec 05 08:08:55 crc kubenswrapper[4863]: I1205 08:08:55.704063 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-259fq" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerName="registry-server" containerID="cri-o://4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c" gracePeriod=2 Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.078890 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhhv"] Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.079385 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qwhhv" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="registry-server" containerID="cri-o://9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc" gracePeriod=2 Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.297246 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.463315 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-catalog-content\") pod \"58dd3ffa-7eea-4093-a034-4182ba532bf4\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.463376 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chr6r\" (UniqueName: \"kubernetes.io/projected/58dd3ffa-7eea-4093-a034-4182ba532bf4-kube-api-access-chr6r\") pod \"58dd3ffa-7eea-4093-a034-4182ba532bf4\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.463550 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-utilities\") pod \"58dd3ffa-7eea-4093-a034-4182ba532bf4\" (UID: \"58dd3ffa-7eea-4093-a034-4182ba532bf4\") " Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.466221 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-utilities" (OuterVolumeSpecName: "utilities") pod "58dd3ffa-7eea-4093-a034-4182ba532bf4" (UID: "58dd3ffa-7eea-4093-a034-4182ba532bf4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.492320 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58dd3ffa-7eea-4093-a034-4182ba532bf4-kube-api-access-chr6r" (OuterVolumeSpecName: "kube-api-access-chr6r") pod "58dd3ffa-7eea-4093-a034-4182ba532bf4" (UID: "58dd3ffa-7eea-4093-a034-4182ba532bf4"). InnerVolumeSpecName "kube-api-access-chr6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.544594 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.565033 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chr6r\" (UniqueName: \"kubernetes.io/projected/58dd3ffa-7eea-4093-a034-4182ba532bf4-kube-api-access-chr6r\") on node \"crc\" DevicePath \"\"" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.565062 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.575149 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58dd3ffa-7eea-4093-a034-4182ba532bf4" (UID: "58dd3ffa-7eea-4093-a034-4182ba532bf4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.668812 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-catalog-content\") pod \"7edec090-2944-4acd-becd-101521bae782\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.669350 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-utilities\") pod \"7edec090-2944-4acd-becd-101521bae782\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.669828 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm7xl\" (UniqueName: \"kubernetes.io/projected/7edec090-2944-4acd-becd-101521bae782-kube-api-access-pm7xl\") pod \"7edec090-2944-4acd-becd-101521bae782\" (UID: \"7edec090-2944-4acd-becd-101521bae782\") " Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.670181 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-utilities" (OuterVolumeSpecName: "utilities") pod "7edec090-2944-4acd-becd-101521bae782" (UID: "7edec090-2944-4acd-becd-101521bae782"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.671174 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dd3ffa-7eea-4093-a034-4182ba532bf4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.671371 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.675860 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7edec090-2944-4acd-becd-101521bae782-kube-api-access-pm7xl" (OuterVolumeSpecName: "kube-api-access-pm7xl") pod "7edec090-2944-4acd-becd-101521bae782" (UID: "7edec090-2944-4acd-becd-101521bae782"). InnerVolumeSpecName "kube-api-access-pm7xl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.685749 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7edec090-2944-4acd-becd-101521bae782" (UID: "7edec090-2944-4acd-becd-101521bae782"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.717288 4863 generic.go:334] "Generic (PLEG): container finished" podID="7edec090-2944-4acd-becd-101521bae782" containerID="9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc" exitCode=0 Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.717337 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhhv" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.717370 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhhv" event={"ID":"7edec090-2944-4acd-becd-101521bae782","Type":"ContainerDied","Data":"9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc"} Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.717405 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhhv" event={"ID":"7edec090-2944-4acd-becd-101521bae782","Type":"ContainerDied","Data":"51be2b20d6cbfe0d295abf1d9d9d19d9118ff9ebdd56e45a1712c1d853a59069"} Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.717424 4863 scope.go:117] "RemoveContainer" containerID="9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.721931 4863 generic.go:334] "Generic (PLEG): container finished" podID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerID="4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c" exitCode=0 Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.721981 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-259fq" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.721988 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-259fq" event={"ID":"58dd3ffa-7eea-4093-a034-4182ba532bf4","Type":"ContainerDied","Data":"4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c"} Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.722024 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-259fq" event={"ID":"58dd3ffa-7eea-4093-a034-4182ba532bf4","Type":"ContainerDied","Data":"81bd4235a5ac4cce510307a07214f8753f318148f78477238586f39bb7d30763"} Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.743828 4863 scope.go:117] "RemoveContainer" containerID="43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.768279 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhhv"] Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.775770 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7edec090-2944-4acd-becd-101521bae782-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.775820 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm7xl\" (UniqueName: \"kubernetes.io/projected/7edec090-2944-4acd-becd-101521bae782-kube-api-access-pm7xl\") on node \"crc\" DevicePath \"\"" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.781284 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhhv"] Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.786021 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-259fq"] Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.791301 4863 scope.go:117] "RemoveContainer" containerID="2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.791951 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/redhat-operators-259fq"] Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.812287 4863 scope.go:117] "RemoveContainer" containerID="9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc" Dec 05 08:08:56 crc kubenswrapper[4863]: E1205 08:08:56.812761 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc\": container with ID starting with 9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc not found: ID does not exist" containerID="9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.812789 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc"} err="failed to get container status \"9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc\": rpc error: code = NotFound desc = could not find container \"9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc\": container with ID starting with 9938881216afc51715ebe0e11705dd9c657f4e3ee98ffb75a15a6695a93599bc not found: ID does not exist" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.812809 4863 scope.go:117] "RemoveContainer" containerID="43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689" Dec 05 08:08:56 crc kubenswrapper[4863]: E1205 08:08:56.813173 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689\": container with ID starting with 43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689 not found: ID does not exist" containerID="43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.813188 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689"} err="failed to get container status \"43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689\": rpc error: code = NotFound desc = could not find container \"43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689\": container with ID starting with 43e4739e91f2ce775c763d2ad1ba4b071c5797aec96fb0faa40c5faa59762689 not found: ID does not exist" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.813201 4863 scope.go:117] "RemoveContainer" containerID="2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31" Dec 05 08:08:56 crc kubenswrapper[4863]: E1205 08:08:56.813392 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31\": container with ID starting with 2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31 not found: ID does not exist" containerID="2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.813406 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31"} err="failed to get container status \"2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31\": rpc error: code = NotFound desc = could not 
find container \"2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31\": container with ID starting with 2f031bb0e09848aec279a5e87f1ea2d602d5af958172af43681462364fcc7f31 not found: ID does not exist" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.813422 4863 scope.go:117] "RemoveContainer" containerID="4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.859930 4863 scope.go:117] "RemoveContainer" containerID="6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.884888 4863 scope.go:117] "RemoveContainer" containerID="a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.914938 4863 scope.go:117] "RemoveContainer" containerID="4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c" Dec 05 08:08:56 crc kubenswrapper[4863]: E1205 08:08:56.915673 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c\": container with ID starting with 4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c not found: ID does not exist" containerID="4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.915709 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c"} err="failed to get container status \"4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c\": rpc error: code = NotFound desc = could not find container \"4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c\": container with ID starting with 4894bc9fa11d560b76e27306d23c8f83384c0c744cf6986a3a6c42143a308c8c not found: ID does not exist" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.915738 4863 scope.go:117] "RemoveContainer" containerID="6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c" Dec 05 08:08:56 crc kubenswrapper[4863]: E1205 08:08:56.916094 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c\": container with ID starting with 6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c not found: ID does not exist" containerID="6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.916147 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c"} err="failed to get container status \"6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c\": rpc error: code = NotFound desc = could not find container \"6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c\": container with ID starting with 6abad0420824151293722d56ef4049f27a607447b8c91a50ec7df7ca46b2b49c not found: ID does not exist" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.916177 4863 scope.go:117] "RemoveContainer" containerID="a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030" Dec 05 08:08:56 crc kubenswrapper[4863]: E1205 08:08:56.916713 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030\": container with ID starting with a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030 not found: ID does not exist" containerID="a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030" Dec 05 08:08:56 crc kubenswrapper[4863]: I1205 08:08:56.916743 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030"} err="failed to get container status \"a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030\": rpc error: code = NotFound desc = could not find container \"a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030\": container with ID starting with a223ee907a8d07a1612e32278d3847f2141a62ad8ee9a53ddc97de9a35e6d030 not found: ID does not exist" Dec 05 08:08:58 crc kubenswrapper[4863]: I1205 08:08:58.622114 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" path="/var/lib/kubelet/pods/58dd3ffa-7eea-4093-a034-4182ba532bf4/volumes" Dec 05 08:08:58 crc kubenswrapper[4863]: I1205 08:08:58.623212 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7edec090-2944-4acd-becd-101521bae782" path="/var/lib/kubelet/pods/7edec090-2944-4acd-becd-101521bae782/volumes" Dec 05 08:09:08 crc kubenswrapper[4863]: I1205 08:09:08.464406 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:09:08 crc kubenswrapper[4863]: I1205 08:09:08.465090 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:09:29 crc kubenswrapper[4863]: I1205 08:09:29.766691 4863 scope.go:117] "RemoveContainer" containerID="1864ad980ee328d5284de7f815980f89f122fccee5b5018a2657c71c0a53a4dc" Dec 05 08:09:29 crc kubenswrapper[4863]: I1205 08:09:29.989164 4863 scope.go:117] "RemoveContainer" containerID="4621ba4465233bea85c27f223167ffa8331a949bdfa38b766dc107cb46d8ad64" Dec 05 08:09:30 crc kubenswrapper[4863]: I1205 08:09:30.234060 4863 scope.go:117] "RemoveContainer" containerID="2f1b1c7d92ce4fe8119e3ff32f14c0720048341c2a479dd4f18edac7259e9664" Dec 05 08:09:30 crc kubenswrapper[4863]: I1205 08:09:30.282436 4863 scope.go:117] "RemoveContainer" containerID="94919f1892b8e589ee2b35833115990d40b4afa985fe9889cb72d75011bf4676" Dec 05 08:09:30 crc kubenswrapper[4863]: I1205 08:09:30.318079 4863 scope.go:117] "RemoveContainer" containerID="f943a92df5ae2b6e8004a03276c49a95d02f918affe8b1e40d9e8c892106e554" Dec 05 08:09:30 crc kubenswrapper[4863]: I1205 08:09:30.356875 4863 scope.go:117] "RemoveContainer" containerID="72cc767135e2e187720dc5122b36f1709a15edff3351b95db195de086f271ff5" Dec 05 08:09:30 crc kubenswrapper[4863]: I1205 08:09:30.379270 4863 scope.go:117] "RemoveContainer" containerID="983eeaf788b6587451ff71fda48c5f1d9306d8568b30e75fea3e812f45dfae3e" Dec 05 08:09:38 crc kubenswrapper[4863]: I1205 08:09:38.464036 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:09:38 crc kubenswrapper[4863]: I1205 08:09:38.464696 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.715241 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-pp6zp"] Dec 05 08:09:44 crc kubenswrapper[4863]: E1205 08:09:44.716301 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="extract-content" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716319 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="extract-content" Dec 05 08:09:44 crc kubenswrapper[4863]: E1205 08:09:44.716342 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="extract-utilities" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716353 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="extract-utilities" Dec 05 08:09:44 crc kubenswrapper[4863]: E1205 08:09:44.716372 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerName="extract-content" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716379 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerName="extract-content" Dec 05 08:09:44 crc kubenswrapper[4863]: E1205 08:09:44.716392 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerName="registry-server" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716399 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerName="registry-server" Dec 05 08:09:44 crc kubenswrapper[4863]: E1205 08:09:44.716421 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerName="extract-utilities" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716428 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" containerName="extract-utilities" Dec 05 08:09:44 crc kubenswrapper[4863]: E1205 08:09:44.716441 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="registry-server" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716449 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="registry-server" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716681 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edec090-2944-4acd-becd-101521bae782" containerName="registry-server" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.716711 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="58dd3ffa-7eea-4093-a034-4182ba532bf4" 
containerName="registry-server" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.717400 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.747887 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-1a4b-account-create-update-27g2v"] Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.750662 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-pp6zp"] Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.750795 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.755897 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.760454 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1a4b-account-create-update-27g2v"] Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.866845 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gjqb\" (UniqueName: \"kubernetes.io/projected/a2959946-59e3-4a24-b94c-c0435cf60fca-kube-api-access-9gjqb\") pod \"barbican-db-create-pp6zp\" (UID: \"a2959946-59e3-4a24-b94c-c0435cf60fca\") " pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.866916 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2959946-59e3-4a24-b94c-c0435cf60fca-operator-scripts\") pod \"barbican-db-create-pp6zp\" (UID: \"a2959946-59e3-4a24-b94c-c0435cf60fca\") " pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.866941 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb5gp\" (UniqueName: \"kubernetes.io/projected/06a3536c-978c-4eb9-8493-66d82c888911-kube-api-access-mb5gp\") pod \"barbican-1a4b-account-create-update-27g2v\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.867260 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a3536c-978c-4eb9-8493-66d82c888911-operator-scripts\") pod \"barbican-1a4b-account-create-update-27g2v\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.968580 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a3536c-978c-4eb9-8493-66d82c888911-operator-scripts\") pod \"barbican-1a4b-account-create-update-27g2v\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.968650 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gjqb\" (UniqueName: \"kubernetes.io/projected/a2959946-59e3-4a24-b94c-c0435cf60fca-kube-api-access-9gjqb\") pod \"barbican-db-create-pp6zp\" (UID: 
\"a2959946-59e3-4a24-b94c-c0435cf60fca\") " pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.968713 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2959946-59e3-4a24-b94c-c0435cf60fca-operator-scripts\") pod \"barbican-db-create-pp6zp\" (UID: \"a2959946-59e3-4a24-b94c-c0435cf60fca\") " pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.968740 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb5gp\" (UniqueName: \"kubernetes.io/projected/06a3536c-978c-4eb9-8493-66d82c888911-kube-api-access-mb5gp\") pod \"barbican-1a4b-account-create-update-27g2v\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.969529 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a3536c-978c-4eb9-8493-66d82c888911-operator-scripts\") pod \"barbican-1a4b-account-create-update-27g2v\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:44 crc kubenswrapper[4863]: I1205 08:09:44.970495 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2959946-59e3-4a24-b94c-c0435cf60fca-operator-scripts\") pod \"barbican-db-create-pp6zp\" (UID: \"a2959946-59e3-4a24-b94c-c0435cf60fca\") " pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:45 crc kubenswrapper[4863]: I1205 08:09:45.003792 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb5gp\" (UniqueName: \"kubernetes.io/projected/06a3536c-978c-4eb9-8493-66d82c888911-kube-api-access-mb5gp\") pod \"barbican-1a4b-account-create-update-27g2v\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:45 crc kubenswrapper[4863]: I1205 08:09:45.007736 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gjqb\" (UniqueName: \"kubernetes.io/projected/a2959946-59e3-4a24-b94c-c0435cf60fca-kube-api-access-9gjqb\") pod \"barbican-db-create-pp6zp\" (UID: \"a2959946-59e3-4a24-b94c-c0435cf60fca\") " pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:45 crc kubenswrapper[4863]: I1205 08:09:45.077759 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:45 crc kubenswrapper[4863]: I1205 08:09:45.089105 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:45 crc kubenswrapper[4863]: I1205 08:09:45.609759 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-pp6zp"] Dec 05 08:09:45 crc kubenswrapper[4863]: I1205 08:09:45.661264 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1a4b-account-create-update-27g2v"] Dec 05 08:09:45 crc kubenswrapper[4863]: W1205 08:09:45.668128 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06a3536c_978c_4eb9_8493_66d82c888911.slice/crio-c466afad338d135b82e9991ee35d058e6cc89811d21813776f420ed38bcf5695 WatchSource:0}: Error finding container c466afad338d135b82e9991ee35d058e6cc89811d21813776f420ed38bcf5695: Status 404 returned error can't find the container with id c466afad338d135b82e9991ee35d058e6cc89811d21813776f420ed38bcf5695 Dec 05 08:09:46 crc kubenswrapper[4863]: I1205 08:09:46.270873 4863 generic.go:334] "Generic (PLEG): container finished" podID="06a3536c-978c-4eb9-8493-66d82c888911" containerID="5cba772715f61d0b35afba5d43b629d7cccf67a31c41602ecbcb3c2ba99f6e9d" exitCode=0 Dec 05 08:09:46 crc kubenswrapper[4863]: I1205 08:09:46.271101 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1a4b-account-create-update-27g2v" event={"ID":"06a3536c-978c-4eb9-8493-66d82c888911","Type":"ContainerDied","Data":"5cba772715f61d0b35afba5d43b629d7cccf67a31c41602ecbcb3c2ba99f6e9d"} Dec 05 08:09:46 crc kubenswrapper[4863]: I1205 08:09:46.271368 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1a4b-account-create-update-27g2v" event={"ID":"06a3536c-978c-4eb9-8493-66d82c888911","Type":"ContainerStarted","Data":"c466afad338d135b82e9991ee35d058e6cc89811d21813776f420ed38bcf5695"} Dec 05 08:09:46 crc kubenswrapper[4863]: I1205 08:09:46.274093 4863 generic.go:334] "Generic (PLEG): container finished" podID="a2959946-59e3-4a24-b94c-c0435cf60fca" containerID="d2dff5629035fa87c068ff59d577620390692ccf0bda4e8c4d8af4e0ac3ac45e" exitCode=0 Dec 05 08:09:46 crc kubenswrapper[4863]: I1205 08:09:46.274185 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-pp6zp" event={"ID":"a2959946-59e3-4a24-b94c-c0435cf60fca","Type":"ContainerDied","Data":"d2dff5629035fa87c068ff59d577620390692ccf0bda4e8c4d8af4e0ac3ac45e"} Dec 05 08:09:46 crc kubenswrapper[4863]: I1205 08:09:46.274237 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-pp6zp" event={"ID":"a2959946-59e3-4a24-b94c-c0435cf60fca","Type":"ContainerStarted","Data":"b2d7f413b2f11c51420267c065ba2c1e2fa1feb6602c0be06f1224925801fb6c"} Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.700870 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.707138 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.821099 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gjqb\" (UniqueName: \"kubernetes.io/projected/a2959946-59e3-4a24-b94c-c0435cf60fca-kube-api-access-9gjqb\") pod \"a2959946-59e3-4a24-b94c-c0435cf60fca\" (UID: \"a2959946-59e3-4a24-b94c-c0435cf60fca\") " Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.821524 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a3536c-978c-4eb9-8493-66d82c888911-operator-scripts\") pod \"06a3536c-978c-4eb9-8493-66d82c888911\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.821710 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2959946-59e3-4a24-b94c-c0435cf60fca-operator-scripts\") pod \"a2959946-59e3-4a24-b94c-c0435cf60fca\" (UID: \"a2959946-59e3-4a24-b94c-c0435cf60fca\") " Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.821752 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb5gp\" (UniqueName: \"kubernetes.io/projected/06a3536c-978c-4eb9-8493-66d82c888911-kube-api-access-mb5gp\") pod \"06a3536c-978c-4eb9-8493-66d82c888911\" (UID: \"06a3536c-978c-4eb9-8493-66d82c888911\") " Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.822214 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06a3536c-978c-4eb9-8493-66d82c888911-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "06a3536c-978c-4eb9-8493-66d82c888911" (UID: "06a3536c-978c-4eb9-8493-66d82c888911"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.822216 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2959946-59e3-4a24-b94c-c0435cf60fca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a2959946-59e3-4a24-b94c-c0435cf60fca" (UID: "a2959946-59e3-4a24-b94c-c0435cf60fca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.827077 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a3536c-978c-4eb9-8493-66d82c888911-kube-api-access-mb5gp" (OuterVolumeSpecName: "kube-api-access-mb5gp") pod "06a3536c-978c-4eb9-8493-66d82c888911" (UID: "06a3536c-978c-4eb9-8493-66d82c888911"). InnerVolumeSpecName "kube-api-access-mb5gp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.830823 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2959946-59e3-4a24-b94c-c0435cf60fca-kube-api-access-9gjqb" (OuterVolumeSpecName: "kube-api-access-9gjqb") pod "a2959946-59e3-4a24-b94c-c0435cf60fca" (UID: "a2959946-59e3-4a24-b94c-c0435cf60fca"). InnerVolumeSpecName "kube-api-access-9gjqb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.923365 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2959946-59e3-4a24-b94c-c0435cf60fca-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.923403 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb5gp\" (UniqueName: \"kubernetes.io/projected/06a3536c-978c-4eb9-8493-66d82c888911-kube-api-access-mb5gp\") on node \"crc\" DevicePath \"\"" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.923414 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gjqb\" (UniqueName: \"kubernetes.io/projected/a2959946-59e3-4a24-b94c-c0435cf60fca-kube-api-access-9gjqb\") on node \"crc\" DevicePath \"\"" Dec 05 08:09:47 crc kubenswrapper[4863]: I1205 08:09:47.923425 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a3536c-978c-4eb9-8493-66d82c888911-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:09:48 crc kubenswrapper[4863]: I1205 08:09:48.297347 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1a4b-account-create-update-27g2v" Dec 05 08:09:48 crc kubenswrapper[4863]: I1205 08:09:48.297341 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1a4b-account-create-update-27g2v" event={"ID":"06a3536c-978c-4eb9-8493-66d82c888911","Type":"ContainerDied","Data":"c466afad338d135b82e9991ee35d058e6cc89811d21813776f420ed38bcf5695"} Dec 05 08:09:48 crc kubenswrapper[4863]: I1205 08:09:48.297522 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c466afad338d135b82e9991ee35d058e6cc89811d21813776f420ed38bcf5695" Dec 05 08:09:48 crc kubenswrapper[4863]: I1205 08:09:48.299288 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-pp6zp" event={"ID":"a2959946-59e3-4a24-b94c-c0435cf60fca","Type":"ContainerDied","Data":"b2d7f413b2f11c51420267c065ba2c1e2fa1feb6602c0be06f1224925801fb6c"} Dec 05 08:09:48 crc kubenswrapper[4863]: I1205 08:09:48.299312 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2d7f413b2f11c51420267c065ba2c1e2fa1feb6602c0be06f1224925801fb6c" Dec 05 08:09:48 crc kubenswrapper[4863]: I1205 08:09:48.299357 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-pp6zp" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.976657 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-85gqg"] Dec 05 08:09:49 crc kubenswrapper[4863]: E1205 08:09:49.977027 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a3536c-978c-4eb9-8493-66d82c888911" containerName="mariadb-account-create-update" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.977040 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a3536c-978c-4eb9-8493-66d82c888911" containerName="mariadb-account-create-update" Dec 05 08:09:49 crc kubenswrapper[4863]: E1205 08:09:49.977058 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2959946-59e3-4a24-b94c-c0435cf60fca" containerName="mariadb-database-create" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.977066 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2959946-59e3-4a24-b94c-c0435cf60fca" containerName="mariadb-database-create" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.977208 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a3536c-978c-4eb9-8493-66d82c888911" containerName="mariadb-account-create-update" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.977237 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2959946-59e3-4a24-b94c-c0435cf60fca" containerName="mariadb-database-create" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.977754 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.980141 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-x6b2g" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.980522 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 08:09:49 crc kubenswrapper[4863]: I1205 08:09:49.998856 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-85gqg"] Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.060009 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-combined-ca-bundle\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.060113 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-db-sync-config-data\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.060196 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wfnz\" (UniqueName: \"kubernetes.io/projected/492ce819-1631-46d6-aff0-8d9e135116ef-kube-api-access-4wfnz\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.161970 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-combined-ca-bundle\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.162023 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-db-sync-config-data\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.162108 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wfnz\" (UniqueName: \"kubernetes.io/projected/492ce819-1631-46d6-aff0-8d9e135116ef-kube-api-access-4wfnz\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.167065 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-combined-ca-bundle\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.169335 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-db-sync-config-data\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.194998 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wfnz\" (UniqueName: \"kubernetes.io/projected/492ce819-1631-46d6-aff0-8d9e135116ef-kube-api-access-4wfnz\") pod \"barbican-db-sync-85gqg\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.340124 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:50 crc kubenswrapper[4863]: I1205 08:09:50.783821 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-85gqg"] Dec 05 08:09:51 crc kubenswrapper[4863]: I1205 08:09:51.324687 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-85gqg" event={"ID":"492ce819-1631-46d6-aff0-8d9e135116ef","Type":"ContainerStarted","Data":"af7c38b7c96d5a9a67c8bcfe5c99165de50d553c3ebd51f99d3438870d302390"} Dec 05 08:09:56 crc kubenswrapper[4863]: I1205 08:09:56.367433 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-85gqg" event={"ID":"492ce819-1631-46d6-aff0-8d9e135116ef","Type":"ContainerStarted","Data":"985ef58a47aaeceb3d311c0b79701bc46f1acfba38ec5641683866ad8c75ccc8"} Dec 05 08:09:56 crc kubenswrapper[4863]: I1205 08:09:56.388253 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-85gqg" podStartSLOduration=2.120306247 podStartE2EDuration="7.388231859s" podCreationTimestamp="2025-12-05 08:09:49 +0000 UTC" firstStartedPulling="2025-12-05 08:09:50.788859454 +0000 UTC m=+5018.514856494" lastFinishedPulling="2025-12-05 08:09:56.056785066 +0000 UTC m=+5023.782782106" observedRunningTime="2025-12-05 08:09:56.381495885 +0000 UTC m=+5024.107492925" watchObservedRunningTime="2025-12-05 08:09:56.388231859 +0000 UTC m=+5024.114228889" Dec 05 08:09:57 crc kubenswrapper[4863]: I1205 08:09:57.392464 4863 generic.go:334] "Generic (PLEG): container finished" podID="492ce819-1631-46d6-aff0-8d9e135116ef" containerID="985ef58a47aaeceb3d311c0b79701bc46f1acfba38ec5641683866ad8c75ccc8" exitCode=0 Dec 05 08:09:57 crc kubenswrapper[4863]: I1205 08:09:57.392934 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-85gqg" event={"ID":"492ce819-1631-46d6-aff0-8d9e135116ef","Type":"ContainerDied","Data":"985ef58a47aaeceb3d311c0b79701bc46f1acfba38ec5641683866ad8c75ccc8"} Dec 05 08:09:58 crc kubenswrapper[4863]: I1205 08:09:58.804188 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:58 crc kubenswrapper[4863]: I1205 08:09:58.916195 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-db-sync-config-data\") pod \"492ce819-1631-46d6-aff0-8d9e135116ef\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " Dec 05 08:09:58 crc kubenswrapper[4863]: I1205 08:09:58.916308 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wfnz\" (UniqueName: \"kubernetes.io/projected/492ce819-1631-46d6-aff0-8d9e135116ef-kube-api-access-4wfnz\") pod \"492ce819-1631-46d6-aff0-8d9e135116ef\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " Dec 05 08:09:58 crc kubenswrapper[4863]: I1205 08:09:58.916420 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-combined-ca-bundle\") pod \"492ce819-1631-46d6-aff0-8d9e135116ef\" (UID: \"492ce819-1631-46d6-aff0-8d9e135116ef\") " Dec 05 08:09:58 crc kubenswrapper[4863]: I1205 08:09:58.921369 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "492ce819-1631-46d6-aff0-8d9e135116ef" (UID: "492ce819-1631-46d6-aff0-8d9e135116ef"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:09:58 crc kubenswrapper[4863]: I1205 08:09:58.926723 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/492ce819-1631-46d6-aff0-8d9e135116ef-kube-api-access-4wfnz" (OuterVolumeSpecName: "kube-api-access-4wfnz") pod "492ce819-1631-46d6-aff0-8d9e135116ef" (UID: "492ce819-1631-46d6-aff0-8d9e135116ef"). InnerVolumeSpecName "kube-api-access-4wfnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:09:58 crc kubenswrapper[4863]: I1205 08:09:58.964608 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "492ce819-1631-46d6-aff0-8d9e135116ef" (UID: "492ce819-1631-46d6-aff0-8d9e135116ef"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.018770 4863 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.018808 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wfnz\" (UniqueName: \"kubernetes.io/projected/492ce819-1631-46d6-aff0-8d9e135116ef-kube-api-access-4wfnz\") on node \"crc\" DevicePath \"\"" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.018819 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/492ce819-1631-46d6-aff0-8d9e135116ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.420289 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-85gqg" event={"ID":"492ce819-1631-46d6-aff0-8d9e135116ef","Type":"ContainerDied","Data":"af7c38b7c96d5a9a67c8bcfe5c99165de50d553c3ebd51f99d3438870d302390"} Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.420647 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af7c38b7c96d5a9a67c8bcfe5c99165de50d553c3ebd51f99d3438870d302390" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.420367 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-85gqg" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.637394 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5546755c6b-ncfhj"] Dec 05 08:09:59 crc kubenswrapper[4863]: E1205 08:09:59.637765 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="492ce819-1631-46d6-aff0-8d9e135116ef" containerName="barbican-db-sync" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.637782 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="492ce819-1631-46d6-aff0-8d9e135116ef" containerName="barbican-db-sync" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.637960 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="492ce819-1631-46d6-aff0-8d9e135116ef" containerName="barbican-db-sync" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.638735 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.644120 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.644400 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.646244 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-x6b2g" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.646395 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-85b58948c5-nhcqr"] Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.648019 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.651487 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.725857 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-85b58948c5-nhcqr"] Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733479 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-config-data\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733545 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bc7dddf-4b40-4664-b820-2de712eab7b2-logs\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733569 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-combined-ca-bundle\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733603 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-combined-ca-bundle\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733632 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-config-data\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733670 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks7lh\" (UniqueName: \"kubernetes.io/projected/9bc7dddf-4b40-4664-b820-2de712eab7b2-kube-api-access-ks7lh\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733720 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-config-data-custom\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733741 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41826e68-cab3-4923-bd7f-ff6364c1c910-logs\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733760 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-config-data-custom\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.733787 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mthv\" (UniqueName: \"kubernetes.io/projected/41826e68-cab3-4923-bd7f-ff6364c1c910-kube-api-access-6mthv\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.763615 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5546755c6b-ncfhj"] Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.773541 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5888d44f8f-ntw6b"] Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.774946 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.809606 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5888d44f8f-ntw6b"] Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835158 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-config-data-custom\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835224 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-config\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835251 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mthv\" (UniqueName: \"kubernetes.io/projected/41826e68-cab3-4923-bd7f-ff6364c1c910-kube-api-access-6mthv\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835315 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-nb\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835349 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-config-data\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bc7dddf-4b40-4664-b820-2de712eab7b2-logs\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835400 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-combined-ca-bundle\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835420 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-combined-ca-bundle\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835453 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-config-data\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835492 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkjmw\" (UniqueName: \"kubernetes.io/projected/4c4d255b-656c-4203-8860-1b6196eb3ac4-kube-api-access-qkjmw\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835523 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-dns-svc\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835541 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks7lh\" (UniqueName: \"kubernetes.io/projected/9bc7dddf-4b40-4664-b820-2de712eab7b2-kube-api-access-ks7lh\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835604 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-sb\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " 
pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835622 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-config-data-custom\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.835640 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41826e68-cab3-4923-bd7f-ff6364c1c910-logs\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.836067 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41826e68-cab3-4923-bd7f-ff6364c1c910-logs\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.837668 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bc7dddf-4b40-4664-b820-2de712eab7b2-logs\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.846610 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-867846d5d6-vrk5r"] Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.846848 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-config-data\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.848393 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.856220 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.863374 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-config-data-custom\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.864164 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc7dddf-4b40-4664-b820-2de712eab7b2-combined-ca-bundle\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.865016 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mthv\" (UniqueName: \"kubernetes.io/projected/41826e68-cab3-4923-bd7f-ff6364c1c910-kube-api-access-6mthv\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.865645 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-config-data-custom\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.867847 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-config-data\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.886750 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41826e68-cab3-4923-bd7f-ff6364c1c910-combined-ca-bundle\") pod \"barbican-worker-85b58948c5-nhcqr\" (UID: \"41826e68-cab3-4923-bd7f-ff6364c1c910\") " pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.892012 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks7lh\" (UniqueName: \"kubernetes.io/projected/9bc7dddf-4b40-4664-b820-2de712eab7b2-kube-api-access-ks7lh\") pod \"barbican-keystone-listener-5546755c6b-ncfhj\" (UID: \"9bc7dddf-4b40-4664-b820-2de712eab7b2\") " pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.894263 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-867846d5d6-vrk5r"] Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938103 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793c6422-8973-4613-aab9-56481761b45f-logs\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: 
\"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938159 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-config-data\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938190 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-sb\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938211 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-combined-ca-bundle\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938256 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-config\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938303 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnqg5\" (UniqueName: \"kubernetes.io/projected/793c6422-8973-4613-aab9-56481761b45f-kube-api-access-cnqg5\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938335 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-nb\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938397 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkjmw\" (UniqueName: \"kubernetes.io/projected/4c4d255b-656c-4203-8860-1b6196eb3ac4-kube-api-access-qkjmw\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938428 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-config-data-custom\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.938456 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-dns-svc\") pod 
\"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.941850 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-dns-svc\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.943919 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-sb\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.943960 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-config\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.946016 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-nb\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.959946 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkjmw\" (UniqueName: \"kubernetes.io/projected/4c4d255b-656c-4203-8860-1b6196eb3ac4-kube-api-access-qkjmw\") pod \"dnsmasq-dns-5888d44f8f-ntw6b\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.962073 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" Dec 05 08:09:59 crc kubenswrapper[4863]: I1205 08:09:59.974835 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-85b58948c5-nhcqr" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.040540 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793c6422-8973-4613-aab9-56481761b45f-logs\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.040612 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-config-data\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.040641 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-combined-ca-bundle\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.040711 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnqg5\" (UniqueName: \"kubernetes.io/projected/793c6422-8973-4613-aab9-56481761b45f-kube-api-access-cnqg5\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.040795 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-config-data-custom\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.045009 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-config-data-custom\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.045032 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793c6422-8973-4613-aab9-56481761b45f-logs\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.049467 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-combined-ca-bundle\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.050530 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/793c6422-8973-4613-aab9-56481761b45f-config-data\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 
crc kubenswrapper[4863]: I1205 08:10:00.061846 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnqg5\" (UniqueName: \"kubernetes.io/projected/793c6422-8973-4613-aab9-56481761b45f-kube-api-access-cnqg5\") pod \"barbican-api-867846d5d6-vrk5r\" (UID: \"793c6422-8973-4613-aab9-56481761b45f\") " pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.072177 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.123578 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.487679 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5546755c6b-ncfhj"] Dec 05 08:10:00 crc kubenswrapper[4863]: W1205 08:10:00.490860 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bc7dddf_4b40_4664_b820_2de712eab7b2.slice/crio-696918c8b3535b608ae93962b432b453a840cfc7ac73ae6e98a5dc36364c06d1 WatchSource:0}: Error finding container 696918c8b3535b608ae93962b432b453a840cfc7ac73ae6e98a5dc36364c06d1: Status 404 returned error can't find the container with id 696918c8b3535b608ae93962b432b453a840cfc7ac73ae6e98a5dc36364c06d1 Dec 05 08:10:00 crc kubenswrapper[4863]: W1205 08:10:00.536572 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41826e68_cab3_4923_bd7f_ff6364c1c910.slice/crio-b8cf26e5b78fe2326c7a41ea60c84b40bb4b0dae5482cbc5d95673f8ca06f897 WatchSource:0}: Error finding container b8cf26e5b78fe2326c7a41ea60c84b40bb4b0dae5482cbc5d95673f8ca06f897: Status 404 returned error can't find the container with id b8cf26e5b78fe2326c7a41ea60c84b40bb4b0dae5482cbc5d95673f8ca06f897 Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.537311 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-85b58948c5-nhcqr"] Dec 05 08:10:00 crc kubenswrapper[4863]: W1205 08:10:00.604207 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod793c6422_8973_4613_aab9_56481761b45f.slice/crio-53bfbe6223994afb87aed1126950032058db1a733f94f18ee79a23201a954abf WatchSource:0}: Error finding container 53bfbe6223994afb87aed1126950032058db1a733f94f18ee79a23201a954abf: Status 404 returned error can't find the container with id 53bfbe6223994afb87aed1126950032058db1a733f94f18ee79a23201a954abf Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.616066 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-867846d5d6-vrk5r"] Dec 05 08:10:00 crc kubenswrapper[4863]: I1205 08:10:00.675848 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5888d44f8f-ntw6b"] Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.438586 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" event={"ID":"9bc7dddf-4b40-4664-b820-2de712eab7b2","Type":"ContainerStarted","Data":"696918c8b3535b608ae93962b432b453a840cfc7ac73ae6e98a5dc36364c06d1"} Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.439726 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-85b58948c5-nhcqr" 
event={"ID":"41826e68-cab3-4923-bd7f-ff6364c1c910","Type":"ContainerStarted","Data":"b8cf26e5b78fe2326c7a41ea60c84b40bb4b0dae5482cbc5d95673f8ca06f897"} Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.445248 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-867846d5d6-vrk5r" event={"ID":"793c6422-8973-4613-aab9-56481761b45f","Type":"ContainerStarted","Data":"c2231fea50500bd99a74d7265ebfb57c66d1fa1f62e695d211764945d75a4109"} Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.445310 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-867846d5d6-vrk5r" event={"ID":"793c6422-8973-4613-aab9-56481761b45f","Type":"ContainerStarted","Data":"81212d651c465cec985279b977c982eeca303e6b1971ed95b02777cd3191f389"} Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.445323 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-867846d5d6-vrk5r" event={"ID":"793c6422-8973-4613-aab9-56481761b45f","Type":"ContainerStarted","Data":"53bfbe6223994afb87aed1126950032058db1a733f94f18ee79a23201a954abf"} Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.445385 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.445441 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.452290 4863 generic.go:334] "Generic (PLEG): container finished" podID="4c4d255b-656c-4203-8860-1b6196eb3ac4" containerID="6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2" exitCode=0 Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.452482 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" event={"ID":"4c4d255b-656c-4203-8860-1b6196eb3ac4","Type":"ContainerDied","Data":"6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2"} Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.452542 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" event={"ID":"4c4d255b-656c-4203-8860-1b6196eb3ac4","Type":"ContainerStarted","Data":"1d480ea8ece95eed9e45a38bd1761dec7446779c6482251a74dd2b224fbef902"} Dec 05 08:10:01 crc kubenswrapper[4863]: I1205 08:10:01.466699 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-867846d5d6-vrk5r" podStartSLOduration=2.466677294 podStartE2EDuration="2.466677294s" podCreationTimestamp="2025-12-05 08:09:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:10:01.460455153 +0000 UTC m=+5029.186452193" watchObservedRunningTime="2025-12-05 08:10:01.466677294 +0000 UTC m=+5029.192674334" Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.462093 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" event={"ID":"9bc7dddf-4b40-4664-b820-2de712eab7b2","Type":"ContainerStarted","Data":"7d687edac8c073e8b3b8176c83357a2b53c5459aca12f188dbb046cd2a259f7d"} Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.462393 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" 
event={"ID":"9bc7dddf-4b40-4664-b820-2de712eab7b2","Type":"ContainerStarted","Data":"d427d0724ca0881274e52593c1dafbedc3b81a4bb71fcb84e793de95acc695b2"} Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.467435 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-85b58948c5-nhcqr" event={"ID":"41826e68-cab3-4923-bd7f-ff6364c1c910","Type":"ContainerStarted","Data":"0611fb0be70fe98663dc5bc7c484fdd6c131b192887a9541da14bac5152021b6"} Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.467462 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-85b58948c5-nhcqr" event={"ID":"41826e68-cab3-4923-bd7f-ff6364c1c910","Type":"ContainerStarted","Data":"4e2b7c4e29cba3af1e74c813bc1effd08204dde422cd0212d189bfdfabe33876"} Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.471647 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" event={"ID":"4c4d255b-656c-4203-8860-1b6196eb3ac4","Type":"ContainerStarted","Data":"1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa"} Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.471674 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.491622 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5546755c6b-ncfhj" podStartSLOduration=2.128713263 podStartE2EDuration="3.491607663s" podCreationTimestamp="2025-12-05 08:09:59 +0000 UTC" firstStartedPulling="2025-12-05 08:10:00.492620489 +0000 UTC m=+5028.218617529" lastFinishedPulling="2025-12-05 08:10:01.855514899 +0000 UTC m=+5029.581511929" observedRunningTime="2025-12-05 08:10:02.485140876 +0000 UTC m=+5030.211137916" watchObservedRunningTime="2025-12-05 08:10:02.491607663 +0000 UTC m=+5030.217604693" Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.506648 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" podStartSLOduration=3.506626878 podStartE2EDuration="3.506626878s" podCreationTimestamp="2025-12-05 08:09:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:10:02.504076525 +0000 UTC m=+5030.230073595" watchObservedRunningTime="2025-12-05 08:10:02.506626878 +0000 UTC m=+5030.232623928" Dec 05 08:10:02 crc kubenswrapper[4863]: I1205 08:10:02.529091 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-85b58948c5-nhcqr" podStartSLOduration=2.181398811 podStartE2EDuration="3.529075812s" podCreationTimestamp="2025-12-05 08:09:59 +0000 UTC" firstStartedPulling="2025-12-05 08:10:00.541353651 +0000 UTC m=+5028.267350691" lastFinishedPulling="2025-12-05 08:10:01.889030652 +0000 UTC m=+5029.615027692" observedRunningTime="2025-12-05 08:10:02.524620074 +0000 UTC m=+5030.250617134" watchObservedRunningTime="2025-12-05 08:10:02.529075812 +0000 UTC m=+5030.255072852" Dec 05 08:10:06 crc kubenswrapper[4863]: I1205 08:10:06.559052 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:07 crc kubenswrapper[4863]: I1205 08:10:07.965068 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-867846d5d6-vrk5r" Dec 05 08:10:08 crc kubenswrapper[4863]: I1205 08:10:08.463944 
4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:10:08 crc kubenswrapper[4863]: I1205 08:10:08.464542 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:10:08 crc kubenswrapper[4863]: I1205 08:10:08.464717 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:10:08 crc kubenswrapper[4863]: I1205 08:10:08.465695 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:10:08 crc kubenswrapper[4863]: I1205 08:10:08.465877 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" gracePeriod=600 Dec 05 08:10:08 crc kubenswrapper[4863]: E1205 08:10:08.584182 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:10:09 crc kubenswrapper[4863]: I1205 08:10:09.542677 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" exitCode=0 Dec 05 08:10:09 crc kubenswrapper[4863]: I1205 08:10:09.542714 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2"} Dec 05 08:10:09 crc kubenswrapper[4863]: I1205 08:10:09.542755 4863 scope.go:117] "RemoveContainer" containerID="e2d70452a6134efae8c51c9d279a146e4f17d7c210af1b1b99dfa2feb9821817" Dec 05 08:10:09 crc kubenswrapper[4863]: I1205 08:10:09.543339 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:10:09 crc kubenswrapper[4863]: E1205 08:10:09.543706 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.126916 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.186225 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bc5ffbc59-lfsqm"] Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.186468 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" podUID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerName="dnsmasq-dns" containerID="cri-o://03c96562b9da42e367a8119ce67dd18a5190fe5225832ed9334354d975171554" gracePeriod=10 Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.570269 4863 generic.go:334] "Generic (PLEG): container finished" podID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerID="03c96562b9da42e367a8119ce67dd18a5190fe5225832ed9334354d975171554" exitCode=0 Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.570311 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" event={"ID":"c940cc43-04e2-425c-81c7-227a2d03e3c9","Type":"ContainerDied","Data":"03c96562b9da42e367a8119ce67dd18a5190fe5225832ed9334354d975171554"} Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.788545 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.875048 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-dns-svc\") pod \"c940cc43-04e2-425c-81c7-227a2d03e3c9\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.875103 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-nb\") pod \"c940cc43-04e2-425c-81c7-227a2d03e3c9\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.875133 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-sb\") pod \"c940cc43-04e2-425c-81c7-227a2d03e3c9\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.875227 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-config\") pod \"c940cc43-04e2-425c-81c7-227a2d03e3c9\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.875271 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbhtb\" (UniqueName: \"kubernetes.io/projected/c940cc43-04e2-425c-81c7-227a2d03e3c9-kube-api-access-sbhtb\") pod \"c940cc43-04e2-425c-81c7-227a2d03e3c9\" (UID: \"c940cc43-04e2-425c-81c7-227a2d03e3c9\") " Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.921895 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/c940cc43-04e2-425c-81c7-227a2d03e3c9-kube-api-access-sbhtb" (OuterVolumeSpecName: "kube-api-access-sbhtb") pod "c940cc43-04e2-425c-81c7-227a2d03e3c9" (UID: "c940cc43-04e2-425c-81c7-227a2d03e3c9"). InnerVolumeSpecName "kube-api-access-sbhtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.922534 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c940cc43-04e2-425c-81c7-227a2d03e3c9" (UID: "c940cc43-04e2-425c-81c7-227a2d03e3c9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.944111 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c940cc43-04e2-425c-81c7-227a2d03e3c9" (UID: "c940cc43-04e2-425c-81c7-227a2d03e3c9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.950230 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-config" (OuterVolumeSpecName: "config") pod "c940cc43-04e2-425c-81c7-227a2d03e3c9" (UID: "c940cc43-04e2-425c-81c7-227a2d03e3c9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.967920 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c940cc43-04e2-425c-81c7-227a2d03e3c9" (UID: "c940cc43-04e2-425c-81c7-227a2d03e3c9"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.976691 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbhtb\" (UniqueName: \"kubernetes.io/projected/c940cc43-04e2-425c-81c7-227a2d03e3c9-kube-api-access-sbhtb\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.976723 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.976734 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.976742 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:10 crc kubenswrapper[4863]: I1205 08:10:10.976749 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c940cc43-04e2-425c-81c7-227a2d03e3c9-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:11 crc kubenswrapper[4863]: I1205 08:10:11.587226 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" event={"ID":"c940cc43-04e2-425c-81c7-227a2d03e3c9","Type":"ContainerDied","Data":"993d1c5d5f3097c9702d7ef1a6556a73d6dfda5586b01f60bf15e30bbf4bbc87"} Dec 05 08:10:11 crc kubenswrapper[4863]: I1205 08:10:11.587590 4863 scope.go:117] "RemoveContainer" containerID="03c96562b9da42e367a8119ce67dd18a5190fe5225832ed9334354d975171554" Dec 05 08:10:11 crc kubenswrapper[4863]: I1205 08:10:11.587822 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc5ffbc59-lfsqm" Dec 05 08:10:11 crc kubenswrapper[4863]: I1205 08:10:11.618368 4863 scope.go:117] "RemoveContainer" containerID="e31c0d44f68e5e1e6de338c4f9801ae135811aec616601b2086d40bdc509ccb2" Dec 05 08:10:11 crc kubenswrapper[4863]: I1205 08:10:11.640034 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bc5ffbc59-lfsqm"] Dec 05 08:10:11 crc kubenswrapper[4863]: I1205 08:10:11.646935 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bc5ffbc59-lfsqm"] Dec 05 08:10:12 crc kubenswrapper[4863]: I1205 08:10:12.616423 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c940cc43-04e2-425c-81c7-227a2d03e3c9" path="/var/lib/kubelet/pods/c940cc43-04e2-425c-81c7-227a2d03e3c9/volumes" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.104078 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-7pptc"] Dec 05 08:10:20 crc kubenswrapper[4863]: E1205 08:10:20.105107 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerName="init" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.105125 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerName="init" Dec 05 08:10:20 crc kubenswrapper[4863]: E1205 08:10:20.105163 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerName="dnsmasq-dns" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.105170 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerName="dnsmasq-dns" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.105351 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c940cc43-04e2-425c-81c7-227a2d03e3c9" containerName="dnsmasq-dns" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.106054 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.119233 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-7pptc"] Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.207945 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8a5d-account-create-update-txbg6"] Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.209098 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.211085 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.221306 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8a5d-account-create-update-txbg6"] Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.231570 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-operator-scripts\") pod \"neutron-db-create-7pptc\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.231924 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjbp8\" (UniqueName: \"kubernetes.io/projected/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-kube-api-access-tjbp8\") pod \"neutron-db-create-7pptc\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.333893 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93e2efad-0215-41d1-a32b-62b1c92c6576-operator-scripts\") pod \"neutron-8a5d-account-create-update-txbg6\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.333973 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h69p2\" (UniqueName: \"kubernetes.io/projected/93e2efad-0215-41d1-a32b-62b1c92c6576-kube-api-access-h69p2\") pod \"neutron-8a5d-account-create-update-txbg6\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.334054 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-operator-scripts\") pod \"neutron-db-create-7pptc\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.334110 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjbp8\" (UniqueName: \"kubernetes.io/projected/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-kube-api-access-tjbp8\") pod \"neutron-db-create-7pptc\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.334931 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-operator-scripts\") pod \"neutron-db-create-7pptc\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.355951 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjbp8\" (UniqueName: \"kubernetes.io/projected/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-kube-api-access-tjbp8\") pod 
\"neutron-db-create-7pptc\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.434139 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.435501 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93e2efad-0215-41d1-a32b-62b1c92c6576-operator-scripts\") pod \"neutron-8a5d-account-create-update-txbg6\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.435556 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h69p2\" (UniqueName: \"kubernetes.io/projected/93e2efad-0215-41d1-a32b-62b1c92c6576-kube-api-access-h69p2\") pod \"neutron-8a5d-account-create-update-txbg6\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.436856 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93e2efad-0215-41d1-a32b-62b1c92c6576-operator-scripts\") pod \"neutron-8a5d-account-create-update-txbg6\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.455039 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h69p2\" (UniqueName: \"kubernetes.io/projected/93e2efad-0215-41d1-a32b-62b1c92c6576-kube-api-access-h69p2\") pod \"neutron-8a5d-account-create-update-txbg6\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.529896 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:20 crc kubenswrapper[4863]: I1205 08:10:20.856675 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-7pptc"] Dec 05 08:10:20 crc kubenswrapper[4863]: W1205 08:10:20.856706 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7dcee0e_bc20_45a8_8f25_088c0fa00c73.slice/crio-3db888f616c9efca8e035fc42d8f8cf8b29a9f56115d35fdd92a5e8984a19b1b WatchSource:0}: Error finding container 3db888f616c9efca8e035fc42d8f8cf8b29a9f56115d35fdd92a5e8984a19b1b: Status 404 returned error can't find the container with id 3db888f616c9efca8e035fc42d8f8cf8b29a9f56115d35fdd92a5e8984a19b1b Dec 05 08:10:21 crc kubenswrapper[4863]: I1205 08:10:21.051012 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8a5d-account-create-update-txbg6"] Dec 05 08:10:21 crc kubenswrapper[4863]: W1205 08:10:21.058022 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93e2efad_0215_41d1_a32b_62b1c92c6576.slice/crio-8ea7b9ec88848cce6330ee72dc1e8bb0cac5e44a7f16cd333771ea5ee24f0894 WatchSource:0}: Error finding container 8ea7b9ec88848cce6330ee72dc1e8bb0cac5e44a7f16cd333771ea5ee24f0894: Status 404 returned error can't find the container with id 8ea7b9ec88848cce6330ee72dc1e8bb0cac5e44a7f16cd333771ea5ee24f0894 Dec 05 08:10:21 crc kubenswrapper[4863]: I1205 08:10:21.685246 4863 generic.go:334] "Generic (PLEG): container finished" podID="93e2efad-0215-41d1-a32b-62b1c92c6576" containerID="3a0aa842d1e339d9f4b779f7b18161be5424c694dd03288204f0ff8c8c2127c0" exitCode=0 Dec 05 08:10:21 crc kubenswrapper[4863]: I1205 08:10:21.685326 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8a5d-account-create-update-txbg6" event={"ID":"93e2efad-0215-41d1-a32b-62b1c92c6576","Type":"ContainerDied","Data":"3a0aa842d1e339d9f4b779f7b18161be5424c694dd03288204f0ff8c8c2127c0"} Dec 05 08:10:21 crc kubenswrapper[4863]: I1205 08:10:21.685359 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8a5d-account-create-update-txbg6" event={"ID":"93e2efad-0215-41d1-a32b-62b1c92c6576","Type":"ContainerStarted","Data":"8ea7b9ec88848cce6330ee72dc1e8bb0cac5e44a7f16cd333771ea5ee24f0894"} Dec 05 08:10:21 crc kubenswrapper[4863]: I1205 08:10:21.687587 4863 generic.go:334] "Generic (PLEG): container finished" podID="b7dcee0e-bc20-45a8-8f25-088c0fa00c73" containerID="e87f1f3405cea4e3a10a44508f43a2fe986422e5e7167ae8a1982d61482c442a" exitCode=0 Dec 05 08:10:21 crc kubenswrapper[4863]: I1205 08:10:21.687621 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7pptc" event={"ID":"b7dcee0e-bc20-45a8-8f25-088c0fa00c73","Type":"ContainerDied","Data":"e87f1f3405cea4e3a10a44508f43a2fe986422e5e7167ae8a1982d61482c442a"} Dec 05 08:10:21 crc kubenswrapper[4863]: I1205 08:10:21.687646 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7pptc" event={"ID":"b7dcee0e-bc20-45a8-8f25-088c0fa00c73","Type":"ContainerStarted","Data":"3db888f616c9efca8e035fc42d8f8cf8b29a9f56115d35fdd92a5e8984a19b1b"} Dec 05 08:10:22 crc kubenswrapper[4863]: I1205 08:10:22.623224 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:10:22 crc kubenswrapper[4863]: E1205 08:10:22.624099 4863 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.088034 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.094505 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.205741 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-operator-scripts\") pod \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.205860 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h69p2\" (UniqueName: \"kubernetes.io/projected/93e2efad-0215-41d1-a32b-62b1c92c6576-kube-api-access-h69p2\") pod \"93e2efad-0215-41d1-a32b-62b1c92c6576\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.205886 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjbp8\" (UniqueName: \"kubernetes.io/projected/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-kube-api-access-tjbp8\") pod \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\" (UID: \"b7dcee0e-bc20-45a8-8f25-088c0fa00c73\") " Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.205922 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93e2efad-0215-41d1-a32b-62b1c92c6576-operator-scripts\") pod \"93e2efad-0215-41d1-a32b-62b1c92c6576\" (UID: \"93e2efad-0215-41d1-a32b-62b1c92c6576\") " Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.206495 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b7dcee0e-bc20-45a8-8f25-088c0fa00c73" (UID: "b7dcee0e-bc20-45a8-8f25-088c0fa00c73"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.206776 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e2efad-0215-41d1-a32b-62b1c92c6576-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "93e2efad-0215-41d1-a32b-62b1c92c6576" (UID: "93e2efad-0215-41d1-a32b-62b1c92c6576"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.211112 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-kube-api-access-tjbp8" (OuterVolumeSpecName: "kube-api-access-tjbp8") pod "b7dcee0e-bc20-45a8-8f25-088c0fa00c73" (UID: "b7dcee0e-bc20-45a8-8f25-088c0fa00c73"). InnerVolumeSpecName "kube-api-access-tjbp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.211301 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93e2efad-0215-41d1-a32b-62b1c92c6576-kube-api-access-h69p2" (OuterVolumeSpecName: "kube-api-access-h69p2") pod "93e2efad-0215-41d1-a32b-62b1c92c6576" (UID: "93e2efad-0215-41d1-a32b-62b1c92c6576"). InnerVolumeSpecName "kube-api-access-h69p2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.307934 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h69p2\" (UniqueName: \"kubernetes.io/projected/93e2efad-0215-41d1-a32b-62b1c92c6576-kube-api-access-h69p2\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.307966 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjbp8\" (UniqueName: \"kubernetes.io/projected/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-kube-api-access-tjbp8\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.307976 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93e2efad-0215-41d1-a32b-62b1c92c6576-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.307985 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7dcee0e-bc20-45a8-8f25-088c0fa00c73-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.716972 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8a5d-account-create-update-txbg6" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.716963 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8a5d-account-create-update-txbg6" event={"ID":"93e2efad-0215-41d1-a32b-62b1c92c6576","Type":"ContainerDied","Data":"8ea7b9ec88848cce6330ee72dc1e8bb0cac5e44a7f16cd333771ea5ee24f0894"} Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.717172 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ea7b9ec88848cce6330ee72dc1e8bb0cac5e44a7f16cd333771ea5ee24f0894" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.719397 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7pptc" event={"ID":"b7dcee0e-bc20-45a8-8f25-088c0fa00c73","Type":"ContainerDied","Data":"3db888f616c9efca8e035fc42d8f8cf8b29a9f56115d35fdd92a5e8984a19b1b"} Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.719655 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3db888f616c9efca8e035fc42d8f8cf8b29a9f56115d35fdd92a5e8984a19b1b" Dec 05 08:10:23 crc kubenswrapper[4863]: I1205 08:10:23.719437 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-7pptc" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.451151 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-9bxkw"] Dec 05 08:10:25 crc kubenswrapper[4863]: E1205 08:10:25.451861 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93e2efad-0215-41d1-a32b-62b1c92c6576" containerName="mariadb-account-create-update" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.451877 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="93e2efad-0215-41d1-a32b-62b1c92c6576" containerName="mariadb-account-create-update" Dec 05 08:10:25 crc kubenswrapper[4863]: E1205 08:10:25.451918 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7dcee0e-bc20-45a8-8f25-088c0fa00c73" containerName="mariadb-database-create" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.451926 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7dcee0e-bc20-45a8-8f25-088c0fa00c73" containerName="mariadb-database-create" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.452146 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="93e2efad-0215-41d1-a32b-62b1c92c6576" containerName="mariadb-account-create-update" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.452164 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7dcee0e-bc20-45a8-8f25-088c0fa00c73" containerName="mariadb-database-create" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.452842 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.455816 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.455909 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.456390 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-lgrvx" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.461430 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-9bxkw"] Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.548978 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-combined-ca-bundle\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.549332 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s88qb\" (UniqueName: \"kubernetes.io/projected/1898d5da-0166-4d68-9fb9-95f980359e8c-kube-api-access-s88qb\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.549391 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-config\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 
08:10:25.651011 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-config\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.651165 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-combined-ca-bundle\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.651203 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s88qb\" (UniqueName: \"kubernetes.io/projected/1898d5da-0166-4d68-9fb9-95f980359e8c-kube-api-access-s88qb\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.657150 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-combined-ca-bundle\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.657384 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-config\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.670049 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s88qb\" (UniqueName: \"kubernetes.io/projected/1898d5da-0166-4d68-9fb9-95f980359e8c-kube-api-access-s88qb\") pod \"neutron-db-sync-9bxkw\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:25 crc kubenswrapper[4863]: I1205 08:10:25.783924 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:26 crc kubenswrapper[4863]: I1205 08:10:26.200835 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-9bxkw"] Dec 05 08:10:26 crc kubenswrapper[4863]: I1205 08:10:26.759797 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9bxkw" event={"ID":"1898d5da-0166-4d68-9fb9-95f980359e8c","Type":"ContainerStarted","Data":"c09ab9f39395160fd517680b736ec63532ecc5ba5e1f95cb5a8e29d6a1ccab52"} Dec 05 08:10:26 crc kubenswrapper[4863]: I1205 08:10:26.760388 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9bxkw" event={"ID":"1898d5da-0166-4d68-9fb9-95f980359e8c","Type":"ContainerStarted","Data":"4d6e6fb3f13baef104dfbc646e67bec3ed5f81c52da711111f67c8c4acf2d241"} Dec 05 08:10:26 crc kubenswrapper[4863]: I1205 08:10:26.779589 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-9bxkw" podStartSLOduration=1.779572715 podStartE2EDuration="1.779572715s" podCreationTimestamp="2025-12-05 08:10:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:10:26.774130323 +0000 UTC m=+5054.500127363" watchObservedRunningTime="2025-12-05 08:10:26.779572715 +0000 UTC m=+5054.505569755" Dec 05 08:10:30 crc kubenswrapper[4863]: I1205 08:10:30.794096 4863 generic.go:334] "Generic (PLEG): container finished" podID="1898d5da-0166-4d68-9fb9-95f980359e8c" containerID="c09ab9f39395160fd517680b736ec63532ecc5ba5e1f95cb5a8e29d6a1ccab52" exitCode=0 Dec 05 08:10:30 crc kubenswrapper[4863]: I1205 08:10:30.794309 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9bxkw" event={"ID":"1898d5da-0166-4d68-9fb9-95f980359e8c","Type":"ContainerDied","Data":"c09ab9f39395160fd517680b736ec63532ecc5ba5e1f95cb5a8e29d6a1ccab52"} Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.103690 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.179063 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-combined-ca-bundle\") pod \"1898d5da-0166-4d68-9fb9-95f980359e8c\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.179111 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s88qb\" (UniqueName: \"kubernetes.io/projected/1898d5da-0166-4d68-9fb9-95f980359e8c-kube-api-access-s88qb\") pod \"1898d5da-0166-4d68-9fb9-95f980359e8c\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.179135 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-config\") pod \"1898d5da-0166-4d68-9fb9-95f980359e8c\" (UID: \"1898d5da-0166-4d68-9fb9-95f980359e8c\") " Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.184226 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1898d5da-0166-4d68-9fb9-95f980359e8c-kube-api-access-s88qb" (OuterVolumeSpecName: "kube-api-access-s88qb") pod "1898d5da-0166-4d68-9fb9-95f980359e8c" (UID: "1898d5da-0166-4d68-9fb9-95f980359e8c"). InnerVolumeSpecName "kube-api-access-s88qb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.201126 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1898d5da-0166-4d68-9fb9-95f980359e8c" (UID: "1898d5da-0166-4d68-9fb9-95f980359e8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.202956 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-config" (OuterVolumeSpecName: "config") pod "1898d5da-0166-4d68-9fb9-95f980359e8c" (UID: "1898d5da-0166-4d68-9fb9-95f980359e8c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.281183 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.281209 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s88qb\" (UniqueName: \"kubernetes.io/projected/1898d5da-0166-4d68-9fb9-95f980359e8c-kube-api-access-s88qb\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.281218 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1898d5da-0166-4d68-9fb9-95f980359e8c-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.815732 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9bxkw" event={"ID":"1898d5da-0166-4d68-9fb9-95f980359e8c","Type":"ContainerDied","Data":"4d6e6fb3f13baef104dfbc646e67bec3ed5f81c52da711111f67c8c4acf2d241"} Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.816005 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d6e6fb3f13baef104dfbc646e67bec3ed5f81c52da711111f67c8c4acf2d241" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.815833 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-9bxkw" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.946992 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd647cb65-2shkp"] Dec 05 08:10:32 crc kubenswrapper[4863]: E1205 08:10:32.947322 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1898d5da-0166-4d68-9fb9-95f980359e8c" containerName="neutron-db-sync" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.947338 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1898d5da-0166-4d68-9fb9-95f980359e8c" containerName="neutron-db-sync" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.947521 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1898d5da-0166-4d68-9fb9-95f980359e8c" containerName="neutron-db-sync" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.948420 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:32 crc kubenswrapper[4863]: I1205 08:10:32.966573 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd647cb65-2shkp"] Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.057521 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ccd7c9685-8g5gr"] Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.062908 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.065354 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.065576 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.065627 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-lgrvx" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.073745 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ccd7c9685-8g5gr"] Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.097358 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-dns-svc\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.097578 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-config\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.097676 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tnc5\" (UniqueName: \"kubernetes.io/projected/6a34b404-7a0c-4546-940b-cc77e12ccfcd-kube-api-access-9tnc5\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.097709 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-nb\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.097765 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-sb\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.199131 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-combined-ca-bundle\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.199181 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlhzd\" (UniqueName: \"kubernetes.io/projected/bc84c40c-704a-41c4-a018-c74351bc4227-kube-api-access-mlhzd\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" 
Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.199230 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-dns-svc\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.199266 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-config\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.199300 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-config\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.199330 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tnc5\" (UniqueName: \"kubernetes.io/projected/6a34b404-7a0c-4546-940b-cc77e12ccfcd-kube-api-access-9tnc5\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.199353 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-httpd-config\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.200214 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-dns-svc\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.200840 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-config\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.200902 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-nb\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.201091 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-sb\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.201450 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-nb\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.201982 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-sb\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.219126 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tnc5\" (UniqueName: \"kubernetes.io/projected/6a34b404-7a0c-4546-940b-cc77e12ccfcd-kube-api-access-9tnc5\") pod \"dnsmasq-dns-cd647cb65-2shkp\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.266151 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.302608 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-httpd-config\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.302748 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-combined-ca-bundle\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.302778 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlhzd\" (UniqueName: \"kubernetes.io/projected/bc84c40c-704a-41c4-a018-c74351bc4227-kube-api-access-mlhzd\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.302839 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-config\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.309194 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-combined-ca-bundle\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.314307 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-config\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.318547 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc84c40c-704a-41c4-a018-c74351bc4227-httpd-config\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.328418 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlhzd\" (UniqueName: \"kubernetes.io/projected/bc84c40c-704a-41c4-a018-c74351bc4227-kube-api-access-mlhzd\") pod \"neutron-ccd7c9685-8g5gr\" (UID: \"bc84c40c-704a-41c4-a018-c74351bc4227\") " pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.426556 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.744091 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd647cb65-2shkp"] Dec 05 08:10:33 crc kubenswrapper[4863]: W1205 08:10:33.750847 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a34b404_7a0c_4546_940b_cc77e12ccfcd.slice/crio-b5f0fe6b6895a9c74f756b73b053b1787394a19d542bae235a7eb590d0d04603 WatchSource:0}: Error finding container b5f0fe6b6895a9c74f756b73b053b1787394a19d542bae235a7eb590d0d04603: Status 404 returned error can't find the container with id b5f0fe6b6895a9c74f756b73b053b1787394a19d542bae235a7eb590d0d04603 Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.828912 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" event={"ID":"6a34b404-7a0c-4546-940b-cc77e12ccfcd","Type":"ContainerStarted","Data":"b5f0fe6b6895a9c74f756b73b053b1787394a19d542bae235a7eb590d0d04603"} Dec 05 08:10:33 crc kubenswrapper[4863]: I1205 08:10:33.969452 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ccd7c9685-8g5gr"] Dec 05 08:10:33 crc kubenswrapper[4863]: W1205 08:10:33.988399 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc84c40c_704a_41c4_a018_c74351bc4227.slice/crio-423063e44850257594ef6a71fe7d6026c22d95d2c9e649d9c2d049fe65f72d7c WatchSource:0}: Error finding container 423063e44850257594ef6a71fe7d6026c22d95d2c9e649d9c2d049fe65f72d7c: Status 404 returned error can't find the container with id 423063e44850257594ef6a71fe7d6026c22d95d2c9e649d9c2d049fe65f72d7c Dec 05 08:10:34 crc kubenswrapper[4863]: I1205 08:10:34.836198 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ccd7c9685-8g5gr" event={"ID":"bc84c40c-704a-41c4-a018-c74351bc4227","Type":"ContainerStarted","Data":"1692a086b0474fec25cf82b60d2ec2e527627701a1365aebfacfbfe76677df12"} Dec 05 08:10:34 crc kubenswrapper[4863]: I1205 08:10:34.836524 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ccd7c9685-8g5gr" event={"ID":"bc84c40c-704a-41c4-a018-c74351bc4227","Type":"ContainerStarted","Data":"fe586cadc70cadd53896760086cc3ef24ad08c46767ee74ba35fa66208389193"} Dec 05 08:10:34 crc kubenswrapper[4863]: I1205 08:10:34.836536 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ccd7c9685-8g5gr" event={"ID":"bc84c40c-704a-41c4-a018-c74351bc4227","Type":"ContainerStarted","Data":"423063e44850257594ef6a71fe7d6026c22d95d2c9e649d9c2d049fe65f72d7c"} Dec 05 08:10:34 crc kubenswrapper[4863]: I1205 08:10:34.836574 4863 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:10:34 crc kubenswrapper[4863]: I1205 08:10:34.838650 4863 generic.go:334] "Generic (PLEG): container finished" podID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerID="b7195861317196c1d38358b91923f8f8b898bb1c1d711df93b5a62078801f410" exitCode=0 Dec 05 08:10:34 crc kubenswrapper[4863]: I1205 08:10:34.838735 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" event={"ID":"6a34b404-7a0c-4546-940b-cc77e12ccfcd","Type":"ContainerDied","Data":"b7195861317196c1d38358b91923f8f8b898bb1c1d711df93b5a62078801f410"} Dec 05 08:10:34 crc kubenswrapper[4863]: I1205 08:10:34.865654 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-ccd7c9685-8g5gr" podStartSLOduration=1.8656345970000001 podStartE2EDuration="1.865634597s" podCreationTimestamp="2025-12-05 08:10:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:10:34.858242397 +0000 UTC m=+5062.584239527" watchObservedRunningTime="2025-12-05 08:10:34.865634597 +0000 UTC m=+5062.591631637" Dec 05 08:10:35 crc kubenswrapper[4863]: I1205 08:10:35.852330 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" event={"ID":"6a34b404-7a0c-4546-940b-cc77e12ccfcd","Type":"ContainerStarted","Data":"fac5971c160ff854999dddb632cb1bfa247de85d8dd12189f4f7535b633d5e1d"} Dec 05 08:10:35 crc kubenswrapper[4863]: I1205 08:10:35.877737 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" podStartSLOduration=3.877720914 podStartE2EDuration="3.877720914s" podCreationTimestamp="2025-12-05 08:10:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:10:35.872832236 +0000 UTC m=+5063.598829276" watchObservedRunningTime="2025-12-05 08:10:35.877720914 +0000 UTC m=+5063.603717944" Dec 05 08:10:36 crc kubenswrapper[4863]: I1205 08:10:36.860435 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:37 crc kubenswrapper[4863]: I1205 08:10:37.602907 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:10:37 crc kubenswrapper[4863]: E1205 08:10:37.603690 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.267684 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.330060 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5888d44f8f-ntw6b"] Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.330352 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" podUID="4c4d255b-656c-4203-8860-1b6196eb3ac4" 
containerName="dnsmasq-dns" containerID="cri-o://1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa" gracePeriod=10 Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.802311 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.897681 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-sb\") pod \"4c4d255b-656c-4203-8860-1b6196eb3ac4\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.897789 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-nb\") pod \"4c4d255b-656c-4203-8860-1b6196eb3ac4\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.897868 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-config\") pod \"4c4d255b-656c-4203-8860-1b6196eb3ac4\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.897905 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-dns-svc\") pod \"4c4d255b-656c-4203-8860-1b6196eb3ac4\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.897961 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkjmw\" (UniqueName: \"kubernetes.io/projected/4c4d255b-656c-4203-8860-1b6196eb3ac4-kube-api-access-qkjmw\") pod \"4c4d255b-656c-4203-8860-1b6196eb3ac4\" (UID: \"4c4d255b-656c-4203-8860-1b6196eb3ac4\") " Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.907602 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c4d255b-656c-4203-8860-1b6196eb3ac4-kube-api-access-qkjmw" (OuterVolumeSpecName: "kube-api-access-qkjmw") pod "4c4d255b-656c-4203-8860-1b6196eb3ac4" (UID: "4c4d255b-656c-4203-8860-1b6196eb3ac4"). InnerVolumeSpecName "kube-api-access-qkjmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.947749 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4c4d255b-656c-4203-8860-1b6196eb3ac4" (UID: "4c4d255b-656c-4203-8860-1b6196eb3ac4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.949857 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-config" (OuterVolumeSpecName: "config") pod "4c4d255b-656c-4203-8860-1b6196eb3ac4" (UID: "4c4d255b-656c-4203-8860-1b6196eb3ac4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.951669 4863 generic.go:334] "Generic (PLEG): container finished" podID="4c4d255b-656c-4203-8860-1b6196eb3ac4" containerID="1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa" exitCode=0 Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.951713 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" event={"ID":"4c4d255b-656c-4203-8860-1b6196eb3ac4","Type":"ContainerDied","Data":"1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa"} Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.951726 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.951745 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5888d44f8f-ntw6b" event={"ID":"4c4d255b-656c-4203-8860-1b6196eb3ac4","Type":"ContainerDied","Data":"1d480ea8ece95eed9e45a38bd1761dec7446779c6482251a74dd2b224fbef902"} Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.951764 4863 scope.go:117] "RemoveContainer" containerID="1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.951809 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4c4d255b-656c-4203-8860-1b6196eb3ac4" (UID: "4c4d255b-656c-4203-8860-1b6196eb3ac4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.968586 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4c4d255b-656c-4203-8860-1b6196eb3ac4" (UID: "4c4d255b-656c-4203-8860-1b6196eb3ac4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:10:43 crc kubenswrapper[4863]: I1205 08:10:43.992423 4863 scope.go:117] "RemoveContainer" containerID="6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.000048 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.000082 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.000105 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.000122 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c4d255b-656c-4203-8860-1b6196eb3ac4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.000139 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkjmw\" (UniqueName: \"kubernetes.io/projected/4c4d255b-656c-4203-8860-1b6196eb3ac4-kube-api-access-qkjmw\") on node \"crc\" DevicePath \"\"" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.012244 4863 scope.go:117] "RemoveContainer" containerID="1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa" Dec 05 08:10:44 crc kubenswrapper[4863]: E1205 08:10:44.012696 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa\": container with ID starting with 1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa not found: ID does not exist" containerID="1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.012761 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa"} err="failed to get container status \"1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa\": rpc error: code = NotFound desc = could not find container \"1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa\": container with ID starting with 1cbde4cdb5ae605aa206af6fc7eae030f43f003c81e4ac0fba27fdb4a7a84efa not found: ID does not exist" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.012790 4863 scope.go:117] "RemoveContainer" containerID="6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2" Dec 05 08:10:44 crc kubenswrapper[4863]: E1205 08:10:44.013226 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2\": container with ID starting with 6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2 not found: ID does not exist" containerID="6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.013270 4863 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2"} err="failed to get container status \"6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2\": rpc error: code = NotFound desc = could not find container \"6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2\": container with ID starting with 6471156a5092bd5007589e73c4c9a811ca54972678841d3106f56047730f70a2 not found: ID does not exist" Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.282648 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5888d44f8f-ntw6b"] Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.290205 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5888d44f8f-ntw6b"] Dec 05 08:10:44 crc kubenswrapper[4863]: I1205 08:10:44.611785 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c4d255b-656c-4203-8860-1b6196eb3ac4" path="/var/lib/kubelet/pods/4c4d255b-656c-4203-8860-1b6196eb3ac4/volumes" Dec 05 08:10:50 crc kubenswrapper[4863]: I1205 08:10:50.602263 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:10:50 crc kubenswrapper[4863]: E1205 08:10:50.602994 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:11:03 crc kubenswrapper[4863]: I1205 08:11:03.435414 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-ccd7c9685-8g5gr" Dec 05 08:11:03 crc kubenswrapper[4863]: I1205 08:11:03.601915 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:11:03 crc kubenswrapper[4863]: E1205 08:11:03.602116 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.315070 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-g948k"] Dec 05 08:11:10 crc kubenswrapper[4863]: E1205 08:11:10.316496 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c4d255b-656c-4203-8860-1b6196eb3ac4" containerName="init" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.316515 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c4d255b-656c-4203-8860-1b6196eb3ac4" containerName="init" Dec 05 08:11:10 crc kubenswrapper[4863]: E1205 08:11:10.316537 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c4d255b-656c-4203-8860-1b6196eb3ac4" containerName="dnsmasq-dns" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.316550 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c4d255b-656c-4203-8860-1b6196eb3ac4" containerName="dnsmasq-dns" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 
08:11:10.316995 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c4d255b-656c-4203-8860-1b6196eb3ac4" containerName="dnsmasq-dns" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.318226 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.345725 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-g948k"] Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.366585 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6875k\" (UniqueName: \"kubernetes.io/projected/e2e6d82a-c499-4bad-a79a-1730a74162db-kube-api-access-6875k\") pod \"glance-db-create-g948k\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.366825 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2e6d82a-c499-4bad-a79a-1730a74162db-operator-scripts\") pod \"glance-db-create-g948k\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.408774 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-2374-account-create-update-g9tph"] Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.409976 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.416492 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2374-account-create-update-g9tph"] Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.416547 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.468771 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85a2d9b6-4db2-4896-84f0-ee58f4750876-operator-scripts\") pod \"glance-2374-account-create-update-g9tph\" (UID: \"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.468852 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6875k\" (UniqueName: \"kubernetes.io/projected/e2e6d82a-c499-4bad-a79a-1730a74162db-kube-api-access-6875k\") pod \"glance-db-create-g948k\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.468928 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2e6d82a-c499-4bad-a79a-1730a74162db-operator-scripts\") pod \"glance-db-create-g948k\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.468960 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5rt4\" (UniqueName: \"kubernetes.io/projected/85a2d9b6-4db2-4896-84f0-ee58f4750876-kube-api-access-j5rt4\") pod \"glance-2374-account-create-update-g9tph\" (UID: 
\"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.469818 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2e6d82a-c499-4bad-a79a-1730a74162db-operator-scripts\") pod \"glance-db-create-g948k\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.487041 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6875k\" (UniqueName: \"kubernetes.io/projected/e2e6d82a-c499-4bad-a79a-1730a74162db-kube-api-access-6875k\") pod \"glance-db-create-g948k\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.570567 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85a2d9b6-4db2-4896-84f0-ee58f4750876-operator-scripts\") pod \"glance-2374-account-create-update-g9tph\" (UID: \"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.570978 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5rt4\" (UniqueName: \"kubernetes.io/projected/85a2d9b6-4db2-4896-84f0-ee58f4750876-kube-api-access-j5rt4\") pod \"glance-2374-account-create-update-g9tph\" (UID: \"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.571335 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85a2d9b6-4db2-4896-84f0-ee58f4750876-operator-scripts\") pod \"glance-2374-account-create-update-g9tph\" (UID: \"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.587115 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5rt4\" (UniqueName: \"kubernetes.io/projected/85a2d9b6-4db2-4896-84f0-ee58f4750876-kube-api-access-j5rt4\") pod \"glance-2374-account-create-update-g9tph\" (UID: \"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.648426 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g948k" Dec 05 08:11:10 crc kubenswrapper[4863]: I1205 08:11:10.734095 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:11 crc kubenswrapper[4863]: I1205 08:11:11.170580 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-g948k"] Dec 05 08:11:11 crc kubenswrapper[4863]: I1205 08:11:11.210911 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g948k" event={"ID":"e2e6d82a-c499-4bad-a79a-1730a74162db","Type":"ContainerStarted","Data":"ba8267080fc840bae858537866fbde250c7185de63b35955352c528ac343b91a"} Dec 05 08:11:11 crc kubenswrapper[4863]: I1205 08:11:11.249123 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2374-account-create-update-g9tph"] Dec 05 08:11:11 crc kubenswrapper[4863]: W1205 08:11:11.254185 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85a2d9b6_4db2_4896_84f0_ee58f4750876.slice/crio-cc425216f2dc406c4e6bc9a5938f37343590150e2e929ec8a8324dcdd748ee42 WatchSource:0}: Error finding container cc425216f2dc406c4e6bc9a5938f37343590150e2e929ec8a8324dcdd748ee42: Status 404 returned error can't find the container with id cc425216f2dc406c4e6bc9a5938f37343590150e2e929ec8a8324dcdd748ee42 Dec 05 08:11:12 crc kubenswrapper[4863]: I1205 08:11:12.223035 4863 generic.go:334] "Generic (PLEG): container finished" podID="e2e6d82a-c499-4bad-a79a-1730a74162db" containerID="cdc5fea231d501aafe34503c4b547233475d9c3e68bd538372b432aa0453aa23" exitCode=0 Dec 05 08:11:12 crc kubenswrapper[4863]: I1205 08:11:12.223124 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g948k" event={"ID":"e2e6d82a-c499-4bad-a79a-1730a74162db","Type":"ContainerDied","Data":"cdc5fea231d501aafe34503c4b547233475d9c3e68bd538372b432aa0453aa23"} Dec 05 08:11:12 crc kubenswrapper[4863]: I1205 08:11:12.225075 4863 generic.go:334] "Generic (PLEG): container finished" podID="85a2d9b6-4db2-4896-84f0-ee58f4750876" containerID="9d95fce929aa765c1942593067ba192e63e4400d6a05a590c53a5fe4321310d4" exitCode=0 Dec 05 08:11:12 crc kubenswrapper[4863]: I1205 08:11:12.225130 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2374-account-create-update-g9tph" event={"ID":"85a2d9b6-4db2-4896-84f0-ee58f4750876","Type":"ContainerDied","Data":"9d95fce929aa765c1942593067ba192e63e4400d6a05a590c53a5fe4321310d4"} Dec 05 08:11:12 crc kubenswrapper[4863]: I1205 08:11:12.225177 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2374-account-create-update-g9tph" event={"ID":"85a2d9b6-4db2-4896-84f0-ee58f4750876","Type":"ContainerStarted","Data":"cc425216f2dc406c4e6bc9a5938f37343590150e2e929ec8a8324dcdd748ee42"} Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.696093 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.703654 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-g948k" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.723744 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85a2d9b6-4db2-4896-84f0-ee58f4750876-operator-scripts\") pod \"85a2d9b6-4db2-4896-84f0-ee58f4750876\" (UID: \"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.723843 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5rt4\" (UniqueName: \"kubernetes.io/projected/85a2d9b6-4db2-4896-84f0-ee58f4750876-kube-api-access-j5rt4\") pod \"85a2d9b6-4db2-4896-84f0-ee58f4750876\" (UID: \"85a2d9b6-4db2-4896-84f0-ee58f4750876\") " Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.725056 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a2d9b6-4db2-4896-84f0-ee58f4750876-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85a2d9b6-4db2-4896-84f0-ee58f4750876" (UID: "85a2d9b6-4db2-4896-84f0-ee58f4750876"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.726996 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85a2d9b6-4db2-4896-84f0-ee58f4750876-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.732348 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85a2d9b6-4db2-4896-84f0-ee58f4750876-kube-api-access-j5rt4" (OuterVolumeSpecName: "kube-api-access-j5rt4") pod "85a2d9b6-4db2-4896-84f0-ee58f4750876" (UID: "85a2d9b6-4db2-4896-84f0-ee58f4750876"). InnerVolumeSpecName "kube-api-access-j5rt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.827717 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6875k\" (UniqueName: \"kubernetes.io/projected/e2e6d82a-c499-4bad-a79a-1730a74162db-kube-api-access-6875k\") pod \"e2e6d82a-c499-4bad-a79a-1730a74162db\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.827967 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2e6d82a-c499-4bad-a79a-1730a74162db-operator-scripts\") pod \"e2e6d82a-c499-4bad-a79a-1730a74162db\" (UID: \"e2e6d82a-c499-4bad-a79a-1730a74162db\") " Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.828368 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5rt4\" (UniqueName: \"kubernetes.io/projected/85a2d9b6-4db2-4896-84f0-ee58f4750876-kube-api-access-j5rt4\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.828706 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2e6d82a-c499-4bad-a79a-1730a74162db-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e2e6d82a-c499-4bad-a79a-1730a74162db" (UID: "e2e6d82a-c499-4bad-a79a-1730a74162db"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.830538 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e6d82a-c499-4bad-a79a-1730a74162db-kube-api-access-6875k" (OuterVolumeSpecName: "kube-api-access-6875k") pod "e2e6d82a-c499-4bad-a79a-1730a74162db" (UID: "e2e6d82a-c499-4bad-a79a-1730a74162db"). InnerVolumeSpecName "kube-api-access-6875k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.929855 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6875k\" (UniqueName: \"kubernetes.io/projected/e2e6d82a-c499-4bad-a79a-1730a74162db-kube-api-access-6875k\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:13 crc kubenswrapper[4863]: I1205 08:11:13.929903 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2e6d82a-c499-4bad-a79a-1730a74162db-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:14 crc kubenswrapper[4863]: I1205 08:11:14.249136 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g948k" event={"ID":"e2e6d82a-c499-4bad-a79a-1730a74162db","Type":"ContainerDied","Data":"ba8267080fc840bae858537866fbde250c7185de63b35955352c528ac343b91a"} Dec 05 08:11:14 crc kubenswrapper[4863]: I1205 08:11:14.249529 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba8267080fc840bae858537866fbde250c7185de63b35955352c528ac343b91a" Dec 05 08:11:14 crc kubenswrapper[4863]: I1205 08:11:14.249176 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g948k" Dec 05 08:11:14 crc kubenswrapper[4863]: I1205 08:11:14.251714 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2374-account-create-update-g9tph" event={"ID":"85a2d9b6-4db2-4896-84f0-ee58f4750876","Type":"ContainerDied","Data":"cc425216f2dc406c4e6bc9a5938f37343590150e2e929ec8a8324dcdd748ee42"} Dec 05 08:11:14 crc kubenswrapper[4863]: I1205 08:11:14.251764 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc425216f2dc406c4e6bc9a5938f37343590150e2e929ec8a8324dcdd748ee42" Dec 05 08:11:14 crc kubenswrapper[4863]: I1205 08:11:14.251885 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2374-account-create-update-g9tph" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.695715 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-vbfft"] Dec 05 08:11:15 crc kubenswrapper[4863]: E1205 08:11:15.696427 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e6d82a-c499-4bad-a79a-1730a74162db" containerName="mariadb-database-create" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.696455 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e6d82a-c499-4bad-a79a-1730a74162db" containerName="mariadb-database-create" Dec 05 08:11:15 crc kubenswrapper[4863]: E1205 08:11:15.696537 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a2d9b6-4db2-4896-84f0-ee58f4750876" containerName="mariadb-account-create-update" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.696552 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a2d9b6-4db2-4896-84f0-ee58f4750876" containerName="mariadb-account-create-update" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.696909 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="85a2d9b6-4db2-4896-84f0-ee58f4750876" containerName="mariadb-account-create-update" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.696963 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e6d82a-c499-4bad-a79a-1730a74162db" containerName="mariadb-database-create" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.698083 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.699859 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-pkhn6" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.700392 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.708717 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vbfft"] Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.758212 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-config-data\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.758297 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-combined-ca-bundle\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.758323 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-db-sync-config-data\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.758344 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9cbh\" (UniqueName: 
\"kubernetes.io/projected/862ddeea-c33e-4ac2-99b0-476bb6451fbe-kube-api-access-s9cbh\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.860222 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-config-data\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.860308 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-combined-ca-bundle\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.860334 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-db-sync-config-data\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.860358 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9cbh\" (UniqueName: \"kubernetes.io/projected/862ddeea-c33e-4ac2-99b0-476bb6451fbe-kube-api-access-s9cbh\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.866058 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-combined-ca-bundle\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.867420 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-config-data\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.868667 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-db-sync-config-data\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:15 crc kubenswrapper[4863]: I1205 08:11:15.877726 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9cbh\" (UniqueName: \"kubernetes.io/projected/862ddeea-c33e-4ac2-99b0-476bb6451fbe-kube-api-access-s9cbh\") pod \"glance-db-sync-vbfft\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:16 crc kubenswrapper[4863]: I1205 08:11:16.029369 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:16 crc kubenswrapper[4863]: I1205 08:11:16.377290 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vbfft"] Dec 05 08:11:17 crc kubenswrapper[4863]: I1205 08:11:17.283322 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vbfft" event={"ID":"862ddeea-c33e-4ac2-99b0-476bb6451fbe","Type":"ContainerStarted","Data":"b62019f731e2fff98c444a23e274aaf7bdfe43a3383ece713a995f3902d32aad"} Dec 05 08:11:17 crc kubenswrapper[4863]: I1205 08:11:17.602017 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:11:17 crc kubenswrapper[4863]: E1205 08:11:17.602362 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:11:28 crc kubenswrapper[4863]: I1205 08:11:28.651525 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:11:28 crc kubenswrapper[4863]: E1205 08:11:28.652347 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:11:35 crc kubenswrapper[4863]: I1205 08:11:35.456373 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vbfft" event={"ID":"862ddeea-c33e-4ac2-99b0-476bb6451fbe","Type":"ContainerStarted","Data":"ab5afe535043e2f2ea675a29ba5891fdce9e0bd333259f9f96d87994794ce3b8"} Dec 05 08:11:35 crc kubenswrapper[4863]: I1205 08:11:35.474121 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-vbfft" podStartSLOduration=2.650450408 podStartE2EDuration="20.474105595s" podCreationTimestamp="2025-12-05 08:11:15 +0000 UTC" firstStartedPulling="2025-12-05 08:11:16.384426618 +0000 UTC m=+5104.110423658" lastFinishedPulling="2025-12-05 08:11:34.208081805 +0000 UTC m=+5121.934078845" observedRunningTime="2025-12-05 08:11:35.470823915 +0000 UTC m=+5123.196820965" watchObservedRunningTime="2025-12-05 08:11:35.474105595 +0000 UTC m=+5123.200102635" Dec 05 08:11:38 crc kubenswrapper[4863]: I1205 08:11:38.487092 4863 generic.go:334] "Generic (PLEG): container finished" podID="862ddeea-c33e-4ac2-99b0-476bb6451fbe" containerID="ab5afe535043e2f2ea675a29ba5891fdce9e0bd333259f9f96d87994794ce3b8" exitCode=0 Dec 05 08:11:38 crc kubenswrapper[4863]: I1205 08:11:38.487183 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vbfft" event={"ID":"862ddeea-c33e-4ac2-99b0-476bb6451fbe","Type":"ContainerDied","Data":"ab5afe535043e2f2ea675a29ba5891fdce9e0bd333259f9f96d87994794ce3b8"} Dec 05 08:11:39 crc kubenswrapper[4863]: I1205 08:11:39.602338 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:11:39 crc 
kubenswrapper[4863]: E1205 08:11:39.603134 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.052776 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.171513 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-combined-ca-bundle\") pod \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.171631 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-config-data\") pod \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.171672 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-db-sync-config-data\") pod \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.171696 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9cbh\" (UniqueName: \"kubernetes.io/projected/862ddeea-c33e-4ac2-99b0-476bb6451fbe-kube-api-access-s9cbh\") pod \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\" (UID: \"862ddeea-c33e-4ac2-99b0-476bb6451fbe\") " Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.182341 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/862ddeea-c33e-4ac2-99b0-476bb6451fbe-kube-api-access-s9cbh" (OuterVolumeSpecName: "kube-api-access-s9cbh") pod "862ddeea-c33e-4ac2-99b0-476bb6451fbe" (UID: "862ddeea-c33e-4ac2-99b0-476bb6451fbe"). InnerVolumeSpecName "kube-api-access-s9cbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.189618 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "862ddeea-c33e-4ac2-99b0-476bb6451fbe" (UID: "862ddeea-c33e-4ac2-99b0-476bb6451fbe"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.209291 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "862ddeea-c33e-4ac2-99b0-476bb6451fbe" (UID: "862ddeea-c33e-4ac2-99b0-476bb6451fbe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.239831 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-config-data" (OuterVolumeSpecName: "config-data") pod "862ddeea-c33e-4ac2-99b0-476bb6451fbe" (UID: "862ddeea-c33e-4ac2-99b0-476bb6451fbe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.273531 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.273562 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.273574 4863 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/862ddeea-c33e-4ac2-99b0-476bb6451fbe-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.273584 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9cbh\" (UniqueName: \"kubernetes.io/projected/862ddeea-c33e-4ac2-99b0-476bb6451fbe-kube-api-access-s9cbh\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.507310 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vbfft" event={"ID":"862ddeea-c33e-4ac2-99b0-476bb6451fbe","Type":"ContainerDied","Data":"b62019f731e2fff98c444a23e274aaf7bdfe43a3383ece713a995f3902d32aad"} Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.507353 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b62019f731e2fff98c444a23e274aaf7bdfe43a3383ece713a995f3902d32aad" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.507416 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vbfft" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.849165 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:40 crc kubenswrapper[4863]: E1205 08:11:40.849606 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="862ddeea-c33e-4ac2-99b0-476bb6451fbe" containerName="glance-db-sync" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.849622 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="862ddeea-c33e-4ac2-99b0-476bb6451fbe" containerName="glance-db-sync" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.849833 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="862ddeea-c33e-4ac2-99b0-476bb6451fbe" containerName="glance-db-sync" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.850958 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.855043 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.855094 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-pkhn6" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.855269 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.858870 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.868277 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.951087 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69fb7f47c9-p96k6"] Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.952628 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.976332 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69fb7f47c9-p96k6"] Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.997038 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.997152 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-scripts\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.997182 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-config-data\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.997220 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-logs\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.997280 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-ceph\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.997322 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:40 crc kubenswrapper[4863]: I1205 08:11:40.997355 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqnd9\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-kube-api-access-vqnd9\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.036594 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.040308 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.047565 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.073598 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099561 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099674 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-scripts\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099719 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-config-data\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099744 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-dns-svc\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099764 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-logs\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099829 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-ceph\") pod 
\"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099873 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkdpg\" (UniqueName: \"kubernetes.io/projected/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-kube-api-access-jkdpg\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099898 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099918 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqnd9\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-kube-api-access-vqnd9\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.099976 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-sb\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.100000 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-config\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.100035 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-nb\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.100702 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.100868 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-logs\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.105262 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.105701 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-scripts\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.109763 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-ceph\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.117231 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqnd9\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-kube-api-access-vqnd9\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.135693 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201600 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201673 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkdpg\" (UniqueName: \"kubernetes.io/projected/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-kube-api-access-jkdpg\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201734 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-sb\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201763 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfmlw\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-kube-api-access-nfmlw\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201796 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-config\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: 
\"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201816 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-nb\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201890 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201924 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201955 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.201980 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-logs\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.202017 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-dns-svc\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.203027 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-dns-svc\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.203719 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-nb\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " 
pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.204397 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-sb\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.205460 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-config\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.216004 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.219973 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkdpg\" (UniqueName: \"kubernetes.io/projected/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-kube-api-access-jkdpg\") pod \"dnsmasq-dns-69fb7f47c9-p96k6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.283187 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.303650 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.303750 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfmlw\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-kube-api-access-nfmlw\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.303790 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.303833 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.303869 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 
08:11:41.303900 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.303925 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-logs\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.304513 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-logs\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.304812 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.311587 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.323774 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.324370 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.328047 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.334491 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfmlw\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-kube-api-access-nfmlw\") pod \"glance-default-internal-api-0\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.372704 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.830602 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.870809 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69fb7f47c9-p96k6"] Dec 05 08:11:41 crc kubenswrapper[4863]: W1205 08:11:41.873148 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c8e782_8d86_41a8_9879_af3cb6fe9cc6.slice/crio-5f5349f7f69ed3b9728379eb0a9ce7155ec36760d0b074712918d8f20839d473 WatchSource:0}: Error finding container 5f5349f7f69ed3b9728379eb0a9ce7155ec36760d0b074712918d8f20839d473: Status 404 returned error can't find the container with id 5f5349f7f69ed3b9728379eb0a9ce7155ec36760d0b074712918d8f20839d473 Dec 05 08:11:41 crc kubenswrapper[4863]: I1205 08:11:41.897785 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:42 crc kubenswrapper[4863]: I1205 08:11:42.042637 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:42 crc kubenswrapper[4863]: W1205 08:11:42.072762 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16717909_cb07_4f20_9d68_1518452ae0d9.slice/crio-ecef044aac1a41e4a713caf02cd154658f1614297c6c26921a0f9d090865bef3 WatchSource:0}: Error finding container ecef044aac1a41e4a713caf02cd154658f1614297c6c26921a0f9d090865bef3: Status 404 returned error can't find the container with id ecef044aac1a41e4a713caf02cd154658f1614297c6c26921a0f9d090865bef3 Dec 05 08:11:42 crc kubenswrapper[4863]: I1205 08:11:42.580958 4863 generic.go:334] "Generic (PLEG): container finished" podID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerID="be8e0b8ea7e2b7ec3fc5170796e2738a6dbf2266f87ce31e6ee9fe19634c8d13" exitCode=0 Dec 05 08:11:42 crc kubenswrapper[4863]: I1205 08:11:42.581056 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" event={"ID":"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6","Type":"ContainerDied","Data":"be8e0b8ea7e2b7ec3fc5170796e2738a6dbf2266f87ce31e6ee9fe19634c8d13"} Dec 05 08:11:42 crc kubenswrapper[4863]: I1205 08:11:42.581378 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" event={"ID":"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6","Type":"ContainerStarted","Data":"5f5349f7f69ed3b9728379eb0a9ce7155ec36760d0b074712918d8f20839d473"} Dec 05 08:11:42 crc kubenswrapper[4863]: I1205 08:11:42.583003 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc468989-0d28-4d24-bd7a-5493135e2b06","Type":"ContainerStarted","Data":"8c1819faf7332dc3d39309177fd3541e805171ca40243fb6c2c16923c595bb02"} Dec 05 08:11:42 crc kubenswrapper[4863]: I1205 08:11:42.584740 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"16717909-cb07-4f20-9d68-1518452ae0d9","Type":"ContainerStarted","Data":"ecef044aac1a41e4a713caf02cd154658f1614297c6c26921a0f9d090865bef3"} Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.594152 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"fc468989-0d28-4d24-bd7a-5493135e2b06","Type":"ContainerStarted","Data":"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b"} Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.594550 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc468989-0d28-4d24-bd7a-5493135e2b06","Type":"ContainerStarted","Data":"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71"} Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.594323 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-httpd" containerID="cri-o://e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b" gracePeriod=30 Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.594243 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-log" containerID="cri-o://07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71" gracePeriod=30 Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.596678 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"16717909-cb07-4f20-9d68-1518452ae0d9","Type":"ContainerStarted","Data":"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e"} Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.596748 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"16717909-cb07-4f20-9d68-1518452ae0d9","Type":"ContainerStarted","Data":"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1"} Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.598740 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" event={"ID":"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6","Type":"ContainerStarted","Data":"45fb708c2a9c45d69560c253f03ec7d2db3454fa73adbea056da3fac66acaa59"} Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.598903 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.620883 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.6208612000000002 podStartE2EDuration="3.6208612s" podCreationTimestamp="2025-12-05 08:11:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:11:43.616950305 +0000 UTC m=+5131.342947345" watchObservedRunningTime="2025-12-05 08:11:43.6208612 +0000 UTC m=+5131.346858240" Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.643709 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.643690185 podStartE2EDuration="3.643690185s" podCreationTimestamp="2025-12-05 08:11:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:11:43.640825624 +0000 UTC m=+5131.366822664" watchObservedRunningTime="2025-12-05 08:11:43.643690185 +0000 UTC m=+5131.369687225" Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.971929 4863 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" podStartSLOduration=3.971910278 podStartE2EDuration="3.971910278s" podCreationTimestamp="2025-12-05 08:11:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:11:43.662204454 +0000 UTC m=+5131.388201514" watchObservedRunningTime="2025-12-05 08:11:43.971910278 +0000 UTC m=+5131.697907318" Dec 05 08:11:43 crc kubenswrapper[4863]: I1205 08:11:43.982556 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.239404 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.362867 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-config-data\") pod \"fc468989-0d28-4d24-bd7a-5493135e2b06\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.362931 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-httpd-run\") pod \"fc468989-0d28-4d24-bd7a-5493135e2b06\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.362957 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqnd9\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-kube-api-access-vqnd9\") pod \"fc468989-0d28-4d24-bd7a-5493135e2b06\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.362994 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-logs\") pod \"fc468989-0d28-4d24-bd7a-5493135e2b06\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.363074 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-ceph\") pod \"fc468989-0d28-4d24-bd7a-5493135e2b06\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.363110 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-combined-ca-bundle\") pod \"fc468989-0d28-4d24-bd7a-5493135e2b06\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.363149 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-scripts\") pod \"fc468989-0d28-4d24-bd7a-5493135e2b06\" (UID: \"fc468989-0d28-4d24-bd7a-5493135e2b06\") " Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.363465 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-httpd-run" (OuterVolumeSpecName: "httpd-run") pod 
"fc468989-0d28-4d24-bd7a-5493135e2b06" (UID: "fc468989-0d28-4d24-bd7a-5493135e2b06"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.363678 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-logs" (OuterVolumeSpecName: "logs") pod "fc468989-0d28-4d24-bd7a-5493135e2b06" (UID: "fc468989-0d28-4d24-bd7a-5493135e2b06"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.375676 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-kube-api-access-vqnd9" (OuterVolumeSpecName: "kube-api-access-vqnd9") pod "fc468989-0d28-4d24-bd7a-5493135e2b06" (UID: "fc468989-0d28-4d24-bd7a-5493135e2b06"). InnerVolumeSpecName "kube-api-access-vqnd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.383869 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-scripts" (OuterVolumeSpecName: "scripts") pod "fc468989-0d28-4d24-bd7a-5493135e2b06" (UID: "fc468989-0d28-4d24-bd7a-5493135e2b06"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.384143 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-ceph" (OuterVolumeSpecName: "ceph") pod "fc468989-0d28-4d24-bd7a-5493135e2b06" (UID: "fc468989-0d28-4d24-bd7a-5493135e2b06"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.389400 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc468989-0d28-4d24-bd7a-5493135e2b06" (UID: "fc468989-0d28-4d24-bd7a-5493135e2b06"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.440838 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-config-data" (OuterVolumeSpecName: "config-data") pod "fc468989-0d28-4d24-bd7a-5493135e2b06" (UID: "fc468989-0d28-4d24-bd7a-5493135e2b06"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.464483 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.464515 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.464526 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.464534 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc468989-0d28-4d24-bd7a-5493135e2b06-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.464545 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.464553 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqnd9\" (UniqueName: \"kubernetes.io/projected/fc468989-0d28-4d24-bd7a-5493135e2b06-kube-api-access-vqnd9\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.464562 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc468989-0d28-4d24-bd7a-5493135e2b06-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.612266 4863 generic.go:334] "Generic (PLEG): container finished" podID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerID="e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b" exitCode=0 Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.612295 4863 generic.go:334] "Generic (PLEG): container finished" podID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerID="07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71" exitCode=143 Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.612770 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.614665 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc468989-0d28-4d24-bd7a-5493135e2b06","Type":"ContainerDied","Data":"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b"} Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.614865 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc468989-0d28-4d24-bd7a-5493135e2b06","Type":"ContainerDied","Data":"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71"} Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.614990 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc468989-0d28-4d24-bd7a-5493135e2b06","Type":"ContainerDied","Data":"8c1819faf7332dc3d39309177fd3541e805171ca40243fb6c2c16923c595bb02"} Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.614948 4863 scope.go:117] "RemoveContainer" containerID="e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.653221 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.657299 4863 scope.go:117] "RemoveContainer" containerID="07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.663686 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.684376 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.684662 4863 scope.go:117] "RemoveContainer" containerID="e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b" Dec 05 08:11:44 crc kubenswrapper[4863]: E1205 08:11:44.684918 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-httpd" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.684946 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-httpd" Dec 05 08:11:44 crc kubenswrapper[4863]: E1205 08:11:44.684961 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-log" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.685031 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-log" Dec 05 08:11:44 crc kubenswrapper[4863]: E1205 08:11:44.685197 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b\": container with ID starting with e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b not found: ID does not exist" containerID="e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.685250 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b"} err="failed to get container status 
\"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b\": rpc error: code = NotFound desc = could not find container \"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b\": container with ID starting with e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b not found: ID does not exist" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.685281 4863 scope.go:117] "RemoveContainer" containerID="07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.685223 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-log" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.685342 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" containerName="glance-httpd" Dec 05 08:11:44 crc kubenswrapper[4863]: E1205 08:11:44.686188 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71\": container with ID starting with 07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71 not found: ID does not exist" containerID="07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.686213 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71"} err="failed to get container status \"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71\": rpc error: code = NotFound desc = could not find container \"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71\": container with ID starting with 07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71 not found: ID does not exist" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.686238 4863 scope.go:117] "RemoveContainer" containerID="e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.686544 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.686806 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b"} err="failed to get container status \"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b\": rpc error: code = NotFound desc = could not find container \"e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b\": container with ID starting with e2e629841da8a6f800709aa898ac1577ff222c14d0659e13dde8a9eb3ba5179b not found: ID does not exist" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.686829 4863 scope.go:117] "RemoveContainer" containerID="07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.687235 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71"} err="failed to get container status \"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71\": rpc error: code = NotFound desc = could not find container \"07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71\": container with ID starting with 07b3b98432d3fab5a3e6852e8e4066a00cd0049dc02e67e4ed27984fe0b61c71 not found: ID does not exist" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.697706 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.706983 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.769240 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.769618 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.769648 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-logs\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.769718 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-ceph\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.769748 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.770030 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.770106 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgjz5\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-kube-api-access-cgjz5\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.871608 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgjz5\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-kube-api-access-cgjz5\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.871699 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.871725 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.871748 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-logs\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.871780 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-ceph\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.871797 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.871886 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.872403 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.872558 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-logs\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.877181 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.877912 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-ceph\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.877928 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.879098 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:44 crc kubenswrapper[4863]: I1205 08:11:44.888514 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgjz5\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-kube-api-access-cgjz5\") pod \"glance-default-external-api-0\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " pod="openstack/glance-default-external-api-0" Dec 05 08:11:45 crc kubenswrapper[4863]: I1205 08:11:45.020647 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:11:45 crc kubenswrapper[4863]: I1205 08:11:45.615142 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:11:45 crc kubenswrapper[4863]: W1205 08:11:45.617863 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8fb13c4_b688_4f81_a02a_f7c5b10aa4e1.slice/crio-e8f9bdce3112713fb501bbe6cda764fb04c691b84e96f6688cb25ca0a6010a87 WatchSource:0}: Error finding container e8f9bdce3112713fb501bbe6cda764fb04c691b84e96f6688cb25ca0a6010a87: Status 404 returned error can't find the container with id e8f9bdce3112713fb501bbe6cda764fb04c691b84e96f6688cb25ca0a6010a87 Dec 05 08:11:45 crc kubenswrapper[4863]: I1205 08:11:45.625634 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-log" containerID="cri-o://2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1" gracePeriod=30 Dec 05 08:11:45 crc kubenswrapper[4863]: I1205 08:11:45.625708 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-httpd" containerID="cri-o://ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e" gracePeriod=30 Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.287488 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.399909 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-logs\") pod \"16717909-cb07-4f20-9d68-1518452ae0d9\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.399975 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-scripts\") pod \"16717909-cb07-4f20-9d68-1518452ae0d9\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.400013 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-httpd-run\") pod \"16717909-cb07-4f20-9d68-1518452ae0d9\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.400071 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-combined-ca-bundle\") pod \"16717909-cb07-4f20-9d68-1518452ae0d9\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.400177 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfmlw\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-kube-api-access-nfmlw\") pod \"16717909-cb07-4f20-9d68-1518452ae0d9\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.400220 4863 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-config-data\") pod \"16717909-cb07-4f20-9d68-1518452ae0d9\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.400254 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-ceph\") pod \"16717909-cb07-4f20-9d68-1518452ae0d9\" (UID: \"16717909-cb07-4f20-9d68-1518452ae0d9\") " Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.400510 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-logs" (OuterVolumeSpecName: "logs") pod "16717909-cb07-4f20-9d68-1518452ae0d9" (UID: "16717909-cb07-4f20-9d68-1518452ae0d9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.400759 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.401018 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "16717909-cb07-4f20-9d68-1518452ae0d9" (UID: "16717909-cb07-4f20-9d68-1518452ae0d9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.403715 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-ceph" (OuterVolumeSpecName: "ceph") pod "16717909-cb07-4f20-9d68-1518452ae0d9" (UID: "16717909-cb07-4f20-9d68-1518452ae0d9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.404374 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-scripts" (OuterVolumeSpecName: "scripts") pod "16717909-cb07-4f20-9d68-1518452ae0d9" (UID: "16717909-cb07-4f20-9d68-1518452ae0d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.404955 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-kube-api-access-nfmlw" (OuterVolumeSpecName: "kube-api-access-nfmlw") pod "16717909-cb07-4f20-9d68-1518452ae0d9" (UID: "16717909-cb07-4f20-9d68-1518452ae0d9"). InnerVolumeSpecName "kube-api-access-nfmlw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.436719 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16717909-cb07-4f20-9d68-1518452ae0d9" (UID: "16717909-cb07-4f20-9d68-1518452ae0d9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.448950 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-config-data" (OuterVolumeSpecName: "config-data") pod "16717909-cb07-4f20-9d68-1518452ae0d9" (UID: "16717909-cb07-4f20-9d68-1518452ae0d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.502273 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfmlw\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-kube-api-access-nfmlw\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.502305 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.502315 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16717909-cb07-4f20-9d68-1518452ae0d9-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.502322 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.502330 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16717909-cb07-4f20-9d68-1518452ae0d9-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.502339 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16717909-cb07-4f20-9d68-1518452ae0d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.620451 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc468989-0d28-4d24-bd7a-5493135e2b06" path="/var/lib/kubelet/pods/fc468989-0d28-4d24-bd7a-5493135e2b06/volumes" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.638889 4863 generic.go:334] "Generic (PLEG): container finished" podID="16717909-cb07-4f20-9d68-1518452ae0d9" containerID="ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e" exitCode=0 Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.638917 4863 generic.go:334] "Generic (PLEG): container finished" podID="16717909-cb07-4f20-9d68-1518452ae0d9" containerID="2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1" exitCode=143 Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.638926 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"16717909-cb07-4f20-9d68-1518452ae0d9","Type":"ContainerDied","Data":"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e"} Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.638965 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"16717909-cb07-4f20-9d68-1518452ae0d9","Type":"ContainerDied","Data":"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1"} Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.638971 4863 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.639002 4863 scope.go:117] "RemoveContainer" containerID="ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.638982 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"16717909-cb07-4f20-9d68-1518452ae0d9","Type":"ContainerDied","Data":"ecef044aac1a41e4a713caf02cd154658f1614297c6c26921a0f9d090865bef3"} Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.640705 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1","Type":"ContainerStarted","Data":"3fe58194689ca75b14d6cb2b37b19b0755b691b5cd77b6c6d730b789866b478f"} Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.640735 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1","Type":"ContainerStarted","Data":"e8f9bdce3112713fb501bbe6cda764fb04c691b84e96f6688cb25ca0a6010a87"} Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.666884 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.670415 4863 scope.go:117] "RemoveContainer" containerID="2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.677973 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.689685 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:46 crc kubenswrapper[4863]: E1205 08:11:46.690047 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-log" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.690071 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-log" Dec 05 08:11:46 crc kubenswrapper[4863]: E1205 08:11:46.690115 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-httpd" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.690124 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-httpd" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.690311 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-log" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.690351 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" containerName="glance-httpd" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.691371 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.696955 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.703798 4863 scope.go:117] "RemoveContainer" containerID="ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e" Dec 05 08:11:46 crc kubenswrapper[4863]: E1205 08:11:46.706109 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e\": container with ID starting with ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e not found: ID does not exist" containerID="ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.706170 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e"} err="failed to get container status \"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e\": rpc error: code = NotFound desc = could not find container \"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e\": container with ID starting with ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e not found: ID does not exist" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.706202 4863 scope.go:117] "RemoveContainer" containerID="2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1" Dec 05 08:11:46 crc kubenswrapper[4863]: E1205 08:11:46.706923 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1\": container with ID starting with 2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1 not found: ID does not exist" containerID="2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.706952 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1"} err="failed to get container status \"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1\": rpc error: code = NotFound desc = could not find container \"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1\": container with ID starting with 2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1 not found: ID does not exist" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.706970 4863 scope.go:117] "RemoveContainer" containerID="ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.707312 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e"} err="failed to get container status \"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e\": rpc error: code = NotFound desc = could not find container \"ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e\": container with ID starting with ba2682eb9243ce0bfa17273dea1368c76e5f538a4b4f78f65cda525d0ab7c61e not found: ID does not exist" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 
08:11:46.707339 4863 scope.go:117] "RemoveContainer" containerID="2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.708365 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.711623 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1"} err="failed to get container status \"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1\": rpc error: code = NotFound desc = could not find container \"2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1\": container with ID starting with 2d726561aa18aa1ca2f36ca1e22be3c9b368f7e0d8e3be067515f278f58d06a1 not found: ID does not exist" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.815020 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.815090 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.815140 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck6dg\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-kube-api-access-ck6dg\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.815193 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-logs\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.815236 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.816865 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.816958 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.918937 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck6dg\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-kube-api-access-ck6dg\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.919333 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-logs\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.919381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.919425 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.919449 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.919577 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.919620 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.920070 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.921807 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-logs\") pod \"glance-default-internal-api-0\" (UID: 
\"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.925277 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.927950 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.932208 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.935458 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:46 crc kubenswrapper[4863]: I1205 08:11:46.943189 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck6dg\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-kube-api-access-ck6dg\") pod \"glance-default-internal-api-0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:11:47 crc kubenswrapper[4863]: I1205 08:11:47.036227 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:47 crc kubenswrapper[4863]: I1205 08:11:47.536372 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:11:47 crc kubenswrapper[4863]: I1205 08:11:47.654757 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60e25931-63ca-4875-b797-b235d731d4e0","Type":"ContainerStarted","Data":"b9ac35a9de351187f97e3eaa6733734c2ec440e3b830595c2cff2244969af543"} Dec 05 08:11:47 crc kubenswrapper[4863]: I1205 08:11:47.657269 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1","Type":"ContainerStarted","Data":"1b7fc830435442fcca8fdaa12a2fe860cf52dfd7d24d2bc3a015e88c10f48366"} Dec 05 08:11:48 crc kubenswrapper[4863]: I1205 08:11:48.616243 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16717909-cb07-4f20-9d68-1518452ae0d9" path="/var/lib/kubelet/pods/16717909-cb07-4f20-9d68-1518452ae0d9/volumes" Dec 05 08:11:48 crc kubenswrapper[4863]: I1205 08:11:48.676211 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60e25931-63ca-4875-b797-b235d731d4e0","Type":"ContainerStarted","Data":"4e810214e6b7402ed65327bc66e35e74b3c9a7e8f5a8d84e09e9a04ed0a1e162"} Dec 05 08:11:48 crc kubenswrapper[4863]: I1205 08:11:48.676509 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60e25931-63ca-4875-b797-b235d731d4e0","Type":"ContainerStarted","Data":"04323d52a4e25f7e74be5f021c0efcf981dc57e74f9da5997135290aaa2dbcf5"} Dec 05 08:11:48 crc kubenswrapper[4863]: I1205 08:11:48.694986 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.694966569 podStartE2EDuration="4.694966569s" podCreationTimestamp="2025-12-05 08:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:11:47.676764044 +0000 UTC m=+5135.402761094" watchObservedRunningTime="2025-12-05 08:11:48.694966569 +0000 UTC m=+5136.420963629" Dec 05 08:11:48 crc kubenswrapper[4863]: I1205 08:11:48.697685 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.6976777050000003 podStartE2EDuration="2.697677705s" podCreationTimestamp="2025-12-05 08:11:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:11:48.693384721 +0000 UTC m=+5136.419381771" watchObservedRunningTime="2025-12-05 08:11:48.697677705 +0000 UTC m=+5136.423674755" Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.285989 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.366417 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd647cb65-2shkp"] Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.367985 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" podUID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerName="dnsmasq-dns" 
containerID="cri-o://fac5971c160ff854999dddb632cb1bfa247de85d8dd12189f4f7535b633d5e1d" gracePeriod=10 Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.717236 4863 generic.go:334] "Generic (PLEG): container finished" podID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerID="fac5971c160ff854999dddb632cb1bfa247de85d8dd12189f4f7535b633d5e1d" exitCode=0 Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.717286 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" event={"ID":"6a34b404-7a0c-4546-940b-cc77e12ccfcd","Type":"ContainerDied","Data":"fac5971c160ff854999dddb632cb1bfa247de85d8dd12189f4f7535b633d5e1d"} Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.854970 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.924965 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-config\") pod \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.925046 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-dns-svc\") pod \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.925110 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-nb\") pod \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.925154 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-sb\") pod \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.925181 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tnc5\" (UniqueName: \"kubernetes.io/projected/6a34b404-7a0c-4546-940b-cc77e12ccfcd-kube-api-access-9tnc5\") pod \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\" (UID: \"6a34b404-7a0c-4546-940b-cc77e12ccfcd\") " Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.930270 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a34b404-7a0c-4546-940b-cc77e12ccfcd-kube-api-access-9tnc5" (OuterVolumeSpecName: "kube-api-access-9tnc5") pod "6a34b404-7a0c-4546-940b-cc77e12ccfcd" (UID: "6a34b404-7a0c-4546-940b-cc77e12ccfcd"). InnerVolumeSpecName "kube-api-access-9tnc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.963606 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6a34b404-7a0c-4546-940b-cc77e12ccfcd" (UID: "6a34b404-7a0c-4546-940b-cc77e12ccfcd"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.965896 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6a34b404-7a0c-4546-940b-cc77e12ccfcd" (UID: "6a34b404-7a0c-4546-940b-cc77e12ccfcd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:11:51 crc kubenswrapper[4863]: I1205 08:11:51.972942 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6a34b404-7a0c-4546-940b-cc77e12ccfcd" (UID: "6a34b404-7a0c-4546-940b-cc77e12ccfcd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.008176 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-config" (OuterVolumeSpecName: "config") pod "6a34b404-7a0c-4546-940b-cc77e12ccfcd" (UID: "6a34b404-7a0c-4546-940b-cc77e12ccfcd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.026838 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.026876 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tnc5\" (UniqueName: \"kubernetes.io/projected/6a34b404-7a0c-4546-940b-cc77e12ccfcd-kube-api-access-9tnc5\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.026889 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.026898 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.026906 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a34b404-7a0c-4546-940b-cc77e12ccfcd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.607340 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:11:52 crc kubenswrapper[4863]: E1205 08:11:52.607590 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.726842 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" 
event={"ID":"6a34b404-7a0c-4546-940b-cc77e12ccfcd","Type":"ContainerDied","Data":"b5f0fe6b6895a9c74f756b73b053b1787394a19d542bae235a7eb590d0d04603"} Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.726905 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd647cb65-2shkp" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.726922 4863 scope.go:117] "RemoveContainer" containerID="fac5971c160ff854999dddb632cb1bfa247de85d8dd12189f4f7535b633d5e1d" Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.750494 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd647cb65-2shkp"] Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.757449 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd647cb65-2shkp"] Dec 05 08:11:52 crc kubenswrapper[4863]: I1205 08:11:52.763875 4863 scope.go:117] "RemoveContainer" containerID="b7195861317196c1d38358b91923f8f8b898bb1c1d711df93b5a62078801f410" Dec 05 08:11:54 crc kubenswrapper[4863]: I1205 08:11:54.621395 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" path="/var/lib/kubelet/pods/6a34b404-7a0c-4546-940b-cc77e12ccfcd/volumes" Dec 05 08:11:55 crc kubenswrapper[4863]: I1205 08:11:55.020841 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 08:11:55 crc kubenswrapper[4863]: I1205 08:11:55.021119 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 08:11:55 crc kubenswrapper[4863]: I1205 08:11:55.046694 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 08:11:55 crc kubenswrapper[4863]: I1205 08:11:55.055740 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 08:11:55 crc kubenswrapper[4863]: I1205 08:11:55.767872 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 08:11:55 crc kubenswrapper[4863]: I1205 08:11:55.767941 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.036678 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.036755 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.076382 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.088493 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.661563 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.704210 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.790315 4863 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:57 crc kubenswrapper[4863]: I1205 08:11:57.790367 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:59 crc kubenswrapper[4863]: I1205 08:11:59.708439 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 08:11:59 crc kubenswrapper[4863]: I1205 08:11:59.766926 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 08:12:04 crc kubenswrapper[4863]: I1205 08:12:04.602151 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:12:04 crc kubenswrapper[4863]: E1205 08:12:04.603194 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:12:06 crc kubenswrapper[4863]: I1205 08:12:06.813156 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-79s2q"] Dec 05 08:12:06 crc kubenswrapper[4863]: E1205 08:12:06.814304 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerName="init" Dec 05 08:12:06 crc kubenswrapper[4863]: I1205 08:12:06.814335 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerName="init" Dec 05 08:12:06 crc kubenswrapper[4863]: E1205 08:12:06.814409 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerName="dnsmasq-dns" Dec 05 08:12:06 crc kubenswrapper[4863]: I1205 08:12:06.814428 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerName="dnsmasq-dns" Dec 05 08:12:06 crc kubenswrapper[4863]: I1205 08:12:06.817818 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a34b404-7a0c-4546-940b-cc77e12ccfcd" containerName="dnsmasq-dns" Dec 05 08:12:06 crc kubenswrapper[4863]: I1205 08:12:06.820446 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:06 crc kubenswrapper[4863]: I1205 08:12:06.823227 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-79s2q"] Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.004063 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfh74\" (UniqueName: \"kubernetes.io/projected/f095a204-cd5b-4081-a622-0563c573d034-kube-api-access-kfh74\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.004201 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-catalog-content\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.004322 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-utilities\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.106587 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfh74\" (UniqueName: \"kubernetes.io/projected/f095a204-cd5b-4081-a622-0563c573d034-kube-api-access-kfh74\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.106767 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-catalog-content\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.106864 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-utilities\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.107345 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-catalog-content\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.107840 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-utilities\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.137364 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kfh74\" (UniqueName: \"kubernetes.io/projected/f095a204-cd5b-4081-a622-0563c573d034-kube-api-access-kfh74\") pod \"certified-operators-79s2q\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.165645 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.682788 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-79s2q"] Dec 05 08:12:07 crc kubenswrapper[4863]: W1205 08:12:07.685400 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf095a204_cd5b_4081_a622_0563c573d034.slice/crio-25c886b2e4c3aef5f12358febb99fb45e876420cee8843c87d900f22b91b821e WatchSource:0}: Error finding container 25c886b2e4c3aef5f12358febb99fb45e876420cee8843c87d900f22b91b821e: Status 404 returned error can't find the container with id 25c886b2e4c3aef5f12358febb99fb45e876420cee8843c87d900f22b91b821e Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.754492 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-792qb"] Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.756176 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-792qb" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.772863 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-792qb"] Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.855496 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-58a8-account-create-update-z2xr4"] Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.857139 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.859168 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.869828 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-58a8-account-create-update-z2xr4"] Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.893943 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79s2q" event={"ID":"f095a204-cd5b-4081-a622-0563c573d034","Type":"ContainerStarted","Data":"25c886b2e4c3aef5f12358febb99fb45e876420cee8843c87d900f22b91b821e"} Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.919800 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tntrq\" (UniqueName: \"kubernetes.io/projected/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-kube-api-access-tntrq\") pod \"placement-db-create-792qb\" (UID: \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " pod="openstack/placement-db-create-792qb" Dec 05 08:12:07 crc kubenswrapper[4863]: I1205 08:12:07.920097 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-operator-scripts\") pod \"placement-db-create-792qb\" (UID: \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " pod="openstack/placement-db-create-792qb" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.021598 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfn87\" (UniqueName: \"kubernetes.io/projected/32ebb64a-49f7-4c64-b02b-5a021b1738fb-kube-api-access-kfn87\") pod \"placement-58a8-account-create-update-z2xr4\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.021929 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32ebb64a-49f7-4c64-b02b-5a021b1738fb-operator-scripts\") pod \"placement-58a8-account-create-update-z2xr4\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.022083 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tntrq\" (UniqueName: \"kubernetes.io/projected/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-kube-api-access-tntrq\") pod \"placement-db-create-792qb\" (UID: \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " pod="openstack/placement-db-create-792qb" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.022283 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-operator-scripts\") pod \"placement-db-create-792qb\" (UID: \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " pod="openstack/placement-db-create-792qb" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.022980 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-operator-scripts\") pod \"placement-db-create-792qb\" (UID: 
\"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " pod="openstack/placement-db-create-792qb" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.041196 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tntrq\" (UniqueName: \"kubernetes.io/projected/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-kube-api-access-tntrq\") pod \"placement-db-create-792qb\" (UID: \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " pod="openstack/placement-db-create-792qb" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.125111 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfn87\" (UniqueName: \"kubernetes.io/projected/32ebb64a-49f7-4c64-b02b-5a021b1738fb-kube-api-access-kfn87\") pod \"placement-58a8-account-create-update-z2xr4\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.125293 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32ebb64a-49f7-4c64-b02b-5a021b1738fb-operator-scripts\") pod \"placement-58a8-account-create-update-z2xr4\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.126461 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32ebb64a-49f7-4c64-b02b-5a021b1738fb-operator-scripts\") pod \"placement-58a8-account-create-update-z2xr4\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.151380 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfn87\" (UniqueName: \"kubernetes.io/projected/32ebb64a-49f7-4c64-b02b-5a021b1738fb-kube-api-access-kfn87\") pod \"placement-58a8-account-create-update-z2xr4\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.196946 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-792qb" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.213985 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.536625 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-58a8-account-create-update-z2xr4"] Dec 05 08:12:08 crc kubenswrapper[4863]: W1205 08:12:08.538924 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32ebb64a_49f7_4c64_b02b_5a021b1738fb.slice/crio-78be491e6d29e140495c9912998313907bda4bf08310378a0ce6900e6ab5deb5 WatchSource:0}: Error finding container 78be491e6d29e140495c9912998313907bda4bf08310378a0ce6900e6ab5deb5: Status 404 returned error can't find the container with id 78be491e6d29e140495c9912998313907bda4bf08310378a0ce6900e6ab5deb5 Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.664992 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-792qb"] Dec 05 08:12:08 crc kubenswrapper[4863]: W1205 08:12:08.667091 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod500d36fb_e8a7_4a4a_87ab_0deb8ee411a7.slice/crio-5fb012fddd0e225148b06861e2ce91813f1341e33c89b29d3a59752a98c50e2e WatchSource:0}: Error finding container 5fb012fddd0e225148b06861e2ce91813f1341e33c89b29d3a59752a98c50e2e: Status 404 returned error can't find the container with id 5fb012fddd0e225148b06861e2ce91813f1341e33c89b29d3a59752a98c50e2e Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.903235 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-792qb" event={"ID":"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7","Type":"ContainerStarted","Data":"cff358e6acff753e870f35a908dd4b26a5f2a6b845bd7837eacba3752eaeee5f"} Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.903299 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-792qb" event={"ID":"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7","Type":"ContainerStarted","Data":"5fb012fddd0e225148b06861e2ce91813f1341e33c89b29d3a59752a98c50e2e"} Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.906268 4863 generic.go:334] "Generic (PLEG): container finished" podID="f095a204-cd5b-4081-a622-0563c573d034" containerID="6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948" exitCode=0 Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.906343 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79s2q" event={"ID":"f095a204-cd5b-4081-a622-0563c573d034","Type":"ContainerDied","Data":"6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948"} Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.908298 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.910278 4863 generic.go:334] "Generic (PLEG): container finished" podID="32ebb64a-49f7-4c64-b02b-5a021b1738fb" containerID="ea89759fce048ed640eaddf232aced0af6ec4897085d8235462ea878b8930c30" exitCode=0 Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.910313 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58a8-account-create-update-z2xr4" event={"ID":"32ebb64a-49f7-4c64-b02b-5a021b1738fb","Type":"ContainerDied","Data":"ea89759fce048ed640eaddf232aced0af6ec4897085d8235462ea878b8930c30"} Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.910353 4863 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/placement-58a8-account-create-update-z2xr4" event={"ID":"32ebb64a-49f7-4c64-b02b-5a021b1738fb","Type":"ContainerStarted","Data":"78be491e6d29e140495c9912998313907bda4bf08310378a0ce6900e6ab5deb5"} Dec 05 08:12:08 crc kubenswrapper[4863]: I1205 08:12:08.925249 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-792qb" podStartSLOduration=1.9252321220000002 podStartE2EDuration="1.925232122s" podCreationTimestamp="2025-12-05 08:12:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:12:08.919850102 +0000 UTC m=+5156.645847152" watchObservedRunningTime="2025-12-05 08:12:08.925232122 +0000 UTC m=+5156.651229162" Dec 05 08:12:09 crc kubenswrapper[4863]: I1205 08:12:09.919115 4863 generic.go:334] "Generic (PLEG): container finished" podID="500d36fb-e8a7-4a4a-87ab-0deb8ee411a7" containerID="cff358e6acff753e870f35a908dd4b26a5f2a6b845bd7837eacba3752eaeee5f" exitCode=0 Dec 05 08:12:09 crc kubenswrapper[4863]: I1205 08:12:09.919326 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-792qb" event={"ID":"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7","Type":"ContainerDied","Data":"cff358e6acff753e870f35a908dd4b26a5f2a6b845bd7837eacba3752eaeee5f"} Dec 05 08:12:09 crc kubenswrapper[4863]: I1205 08:12:09.924598 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79s2q" event={"ID":"f095a204-cd5b-4081-a622-0563c573d034","Type":"ContainerStarted","Data":"0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11"} Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.333238 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.398135 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32ebb64a-49f7-4c64-b02b-5a021b1738fb-operator-scripts\") pod \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.398252 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfn87\" (UniqueName: \"kubernetes.io/projected/32ebb64a-49f7-4c64-b02b-5a021b1738fb-kube-api-access-kfn87\") pod \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\" (UID: \"32ebb64a-49f7-4c64-b02b-5a021b1738fb\") " Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.399398 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32ebb64a-49f7-4c64-b02b-5a021b1738fb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "32ebb64a-49f7-4c64-b02b-5a021b1738fb" (UID: "32ebb64a-49f7-4c64-b02b-5a021b1738fb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.415750 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32ebb64a-49f7-4c64-b02b-5a021b1738fb-kube-api-access-kfn87" (OuterVolumeSpecName: "kube-api-access-kfn87") pod "32ebb64a-49f7-4c64-b02b-5a021b1738fb" (UID: "32ebb64a-49f7-4c64-b02b-5a021b1738fb"). InnerVolumeSpecName "kube-api-access-kfn87". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.500366 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfn87\" (UniqueName: \"kubernetes.io/projected/32ebb64a-49f7-4c64-b02b-5a021b1738fb-kube-api-access-kfn87\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.500402 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32ebb64a-49f7-4c64-b02b-5a021b1738fb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.936894 4863 generic.go:334] "Generic (PLEG): container finished" podID="f095a204-cd5b-4081-a622-0563c573d034" containerID="0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11" exitCode=0 Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.936972 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79s2q" event={"ID":"f095a204-cd5b-4081-a622-0563c573d034","Type":"ContainerDied","Data":"0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11"} Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.938528 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58a8-account-create-update-z2xr4" event={"ID":"32ebb64a-49f7-4c64-b02b-5a021b1738fb","Type":"ContainerDied","Data":"78be491e6d29e140495c9912998313907bda4bf08310378a0ce6900e6ab5deb5"} Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.938592 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78be491e6d29e140495c9912998313907bda4bf08310378a0ce6900e6ab5deb5" Dec 05 08:12:10 crc kubenswrapper[4863]: I1205 08:12:10.938749 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-58a8-account-create-update-z2xr4" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.255847 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-792qb" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.311893 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tntrq\" (UniqueName: \"kubernetes.io/projected/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-kube-api-access-tntrq\") pod \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\" (UID: \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.311947 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-operator-scripts\") pod \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\" (UID: \"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7\") " Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.312748 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "500d36fb-e8a7-4a4a-87ab-0deb8ee411a7" (UID: "500d36fb-e8a7-4a4a-87ab-0deb8ee411a7"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.316397 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-kube-api-access-tntrq" (OuterVolumeSpecName: "kube-api-access-tntrq") pod "500d36fb-e8a7-4a4a-87ab-0deb8ee411a7" (UID: "500d36fb-e8a7-4a4a-87ab-0deb8ee411a7"). InnerVolumeSpecName "kube-api-access-tntrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.413281 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tntrq\" (UniqueName: \"kubernetes.io/projected/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-kube-api-access-tntrq\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.413312 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.951005 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-792qb" event={"ID":"500d36fb-e8a7-4a4a-87ab-0deb8ee411a7","Type":"ContainerDied","Data":"5fb012fddd0e225148b06861e2ce91813f1341e33c89b29d3a59752a98c50e2e"} Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.951288 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fb012fddd0e225148b06861e2ce91813f1341e33c89b29d3a59752a98c50e2e" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.951034 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-792qb" Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.953549 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79s2q" event={"ID":"f095a204-cd5b-4081-a622-0563c573d034","Type":"ContainerStarted","Data":"3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f"} Dec 05 08:12:11 crc kubenswrapper[4863]: I1205 08:12:11.985116 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-79s2q" podStartSLOduration=3.477127794 podStartE2EDuration="5.985098348s" podCreationTimestamp="2025-12-05 08:12:06 +0000 UTC" firstStartedPulling="2025-12-05 08:12:08.907984824 +0000 UTC m=+5156.633981874" lastFinishedPulling="2025-12-05 08:12:11.415955388 +0000 UTC m=+5159.141952428" observedRunningTime="2025-12-05 08:12:11.977296709 +0000 UTC m=+5159.703293759" watchObservedRunningTime="2025-12-05 08:12:11.985098348 +0000 UTC m=+5159.711095388" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.291774 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58b44f6965-dkkhm"] Dec 05 08:12:13 crc kubenswrapper[4863]: E1205 08:12:13.292444 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500d36fb-e8a7-4a4a-87ab-0deb8ee411a7" containerName="mariadb-database-create" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.292458 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="500d36fb-e8a7-4a4a-87ab-0deb8ee411a7" containerName="mariadb-database-create" Dec 05 08:12:13 crc kubenswrapper[4863]: E1205 08:12:13.292570 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ebb64a-49f7-4c64-b02b-5a021b1738fb" containerName="mariadb-account-create-update" Dec 05 08:12:13 
crc kubenswrapper[4863]: I1205 08:12:13.292578 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ebb64a-49f7-4c64-b02b-5a021b1738fb" containerName="mariadb-account-create-update" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.292749 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ebb64a-49f7-4c64-b02b-5a021b1738fb" containerName="mariadb-account-create-update" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.292764 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="500d36fb-e8a7-4a4a-87ab-0deb8ee411a7" containerName="mariadb-database-create" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.293775 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.299446 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58b44f6965-dkkhm"] Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.359004 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-mwppq"] Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.360054 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.362837 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.363023 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-m2htl" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.363235 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.431044 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-mwppq"] Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449204 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-nb\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449261 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7107b12-f131-4b67-b3c2-34afcdc8dd67-logs\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449284 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9pqq\" (UniqueName: \"kubernetes.io/projected/ac70b169-15e5-4d35-aad0-9bdc71a5a505-kube-api-access-n9pqq\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449327 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-sb\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " 
pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449355 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-config\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449376 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-dns-svc\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449430 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-combined-ca-bundle\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449450 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pxq2\" (UniqueName: \"kubernetes.io/projected/d7107b12-f131-4b67-b3c2-34afcdc8dd67-kube-api-access-6pxq2\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449554 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-scripts\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.449622 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-config-data\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551140 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-combined-ca-bundle\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551215 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pxq2\" (UniqueName: \"kubernetes.io/projected/d7107b12-f131-4b67-b3c2-34afcdc8dd67-kube-api-access-6pxq2\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551332 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-scripts\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" 
Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551386 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-config-data\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551441 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-nb\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551493 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7107b12-f131-4b67-b3c2-34afcdc8dd67-logs\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551524 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9pqq\" (UniqueName: \"kubernetes.io/projected/ac70b169-15e5-4d35-aad0-9bdc71a5a505-kube-api-access-n9pqq\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551572 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-sb\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551605 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-config\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.551648 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-dns-svc\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.552031 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7107b12-f131-4b67-b3c2-34afcdc8dd67-logs\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.552497 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-config\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.552591 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-sb\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.552750 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-dns-svc\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.552912 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-nb\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.560205 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-combined-ca-bundle\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.560926 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-config-data\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.566875 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-scripts\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.571230 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pxq2\" (UniqueName: \"kubernetes.io/projected/d7107b12-f131-4b67-b3c2-34afcdc8dd67-kube-api-access-6pxq2\") pod \"placement-db-sync-mwppq\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.572073 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9pqq\" (UniqueName: \"kubernetes.io/projected/ac70b169-15e5-4d35-aad0-9bdc71a5a505-kube-api-access-n9pqq\") pod \"dnsmasq-dns-58b44f6965-dkkhm\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.618365 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:13 crc kubenswrapper[4863]: I1205 08:12:13.676378 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:14 crc kubenswrapper[4863]: I1205 08:12:14.113970 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58b44f6965-dkkhm"] Dec 05 08:12:14 crc kubenswrapper[4863]: W1205 08:12:14.125328 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac70b169_15e5_4d35_aad0_9bdc71a5a505.slice/crio-eecff0852da0258a3c727dc105740676d42b5992c2b0461f0d0c79a3f7e263fb WatchSource:0}: Error finding container eecff0852da0258a3c727dc105740676d42b5992c2b0461f0d0c79a3f7e263fb: Status 404 returned error can't find the container with id eecff0852da0258a3c727dc105740676d42b5992c2b0461f0d0c79a3f7e263fb Dec 05 08:12:14 crc kubenswrapper[4863]: I1205 08:12:14.207885 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-mwppq"] Dec 05 08:12:14 crc kubenswrapper[4863]: W1205 08:12:14.208636 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7107b12_f131_4b67_b3c2_34afcdc8dd67.slice/crio-5fe096a03b5b69efc47aac50ed3ba581f0d5abc2fb29b9215dd2a3a7dce5525d WatchSource:0}: Error finding container 5fe096a03b5b69efc47aac50ed3ba581f0d5abc2fb29b9215dd2a3a7dce5525d: Status 404 returned error can't find the container with id 5fe096a03b5b69efc47aac50ed3ba581f0d5abc2fb29b9215dd2a3a7dce5525d Dec 05 08:12:14 crc kubenswrapper[4863]: I1205 08:12:14.982559 4863 generic.go:334] "Generic (PLEG): container finished" podID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" containerID="0a1a4acbc52d9c9810bab70b409191e7e15e099cfef4b17e034fc46fc2c1f73d" exitCode=0 Dec 05 08:12:14 crc kubenswrapper[4863]: I1205 08:12:14.982655 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" event={"ID":"ac70b169-15e5-4d35-aad0-9bdc71a5a505","Type":"ContainerDied","Data":"0a1a4acbc52d9c9810bab70b409191e7e15e099cfef4b17e034fc46fc2c1f73d"} Dec 05 08:12:14 crc kubenswrapper[4863]: I1205 08:12:14.983023 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" event={"ID":"ac70b169-15e5-4d35-aad0-9bdc71a5a505","Type":"ContainerStarted","Data":"eecff0852da0258a3c727dc105740676d42b5992c2b0461f0d0c79a3f7e263fb"} Dec 05 08:12:14 crc kubenswrapper[4863]: I1205 08:12:14.984294 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mwppq" event={"ID":"d7107b12-f131-4b67-b3c2-34afcdc8dd67","Type":"ContainerStarted","Data":"5fe096a03b5b69efc47aac50ed3ba581f0d5abc2fb29b9215dd2a3a7dce5525d"} Dec 05 08:12:15 crc kubenswrapper[4863]: I1205 08:12:15.995632 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" event={"ID":"ac70b169-15e5-4d35-aad0-9bdc71a5a505","Type":"ContainerStarted","Data":"5190debf9deea84fef2b5106b984f1fcf8118a9346be06867692ea26fb4f2e11"} Dec 05 08:12:15 crc kubenswrapper[4863]: I1205 08:12:15.997244 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:16 crc kubenswrapper[4863]: I1205 08:12:16.019698 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" podStartSLOduration=3.019677094 podStartE2EDuration="3.019677094s" podCreationTimestamp="2025-12-05 08:12:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-12-05 08:12:16.013261508 +0000 UTC m=+5163.739258548" watchObservedRunningTime="2025-12-05 08:12:16.019677094 +0000 UTC m=+5163.745674134" Dec 05 08:12:16 crc kubenswrapper[4863]: I1205 08:12:16.603111 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:12:16 crc kubenswrapper[4863]: E1205 08:12:16.603388 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:12:17 crc kubenswrapper[4863]: I1205 08:12:17.166496 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:17 crc kubenswrapper[4863]: I1205 08:12:17.166825 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:17 crc kubenswrapper[4863]: I1205 08:12:17.214506 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:18 crc kubenswrapper[4863]: I1205 08:12:18.016740 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mwppq" event={"ID":"d7107b12-f131-4b67-b3c2-34afcdc8dd67","Type":"ContainerStarted","Data":"e12b4e2d7441e28e79b82c21835a39f9b0be725411b8a9ff2ec33c001f19759c"} Dec 05 08:12:18 crc kubenswrapper[4863]: I1205 08:12:18.078075 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:18 crc kubenswrapper[4863]: I1205 08:12:18.100390 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-mwppq" podStartSLOduration=1.557374622 podStartE2EDuration="5.100371091s" podCreationTimestamp="2025-12-05 08:12:13 +0000 UTC" firstStartedPulling="2025-12-05 08:12:14.210099495 +0000 UTC m=+5161.936096535" lastFinishedPulling="2025-12-05 08:12:17.753095964 +0000 UTC m=+5165.479093004" observedRunningTime="2025-12-05 08:12:18.053263807 +0000 UTC m=+5165.779260847" watchObservedRunningTime="2025-12-05 08:12:18.100371091 +0000 UTC m=+5165.826368131" Dec 05 08:12:18 crc kubenswrapper[4863]: I1205 08:12:18.125550 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-79s2q"] Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.046271 4863 generic.go:334] "Generic (PLEG): container finished" podID="d7107b12-f131-4b67-b3c2-34afcdc8dd67" containerID="e12b4e2d7441e28e79b82c21835a39f9b0be725411b8a9ff2ec33c001f19759c" exitCode=0 Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.046331 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mwppq" event={"ID":"d7107b12-f131-4b67-b3c2-34afcdc8dd67","Type":"ContainerDied","Data":"e12b4e2d7441e28e79b82c21835a39f9b0be725411b8a9ff2ec33c001f19759c"} Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.046999 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-79s2q" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="registry-server" 
containerID="cri-o://3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f" gracePeriod=2 Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.529281 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.607829 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfh74\" (UniqueName: \"kubernetes.io/projected/f095a204-cd5b-4081-a622-0563c573d034-kube-api-access-kfh74\") pod \"f095a204-cd5b-4081-a622-0563c573d034\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.608662 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-catalog-content\") pod \"f095a204-cd5b-4081-a622-0563c573d034\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.608828 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-utilities\") pod \"f095a204-cd5b-4081-a622-0563c573d034\" (UID: \"f095a204-cd5b-4081-a622-0563c573d034\") " Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.609780 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-utilities" (OuterVolumeSpecName: "utilities") pod "f095a204-cd5b-4081-a622-0563c573d034" (UID: "f095a204-cd5b-4081-a622-0563c573d034"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.613776 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f095a204-cd5b-4081-a622-0563c573d034-kube-api-access-kfh74" (OuterVolumeSpecName: "kube-api-access-kfh74") pod "f095a204-cd5b-4081-a622-0563c573d034" (UID: "f095a204-cd5b-4081-a622-0563c573d034"). InnerVolumeSpecName "kube-api-access-kfh74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.665056 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f095a204-cd5b-4081-a622-0563c573d034" (UID: "f095a204-cd5b-4081-a622-0563c573d034"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.713622 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfh74\" (UniqueName: \"kubernetes.io/projected/f095a204-cd5b-4081-a622-0563c573d034-kube-api-access-kfh74\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.713654 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:20 crc kubenswrapper[4863]: I1205 08:12:20.713666 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095a204-cd5b-4081-a622-0563c573d034-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.059573 4863 generic.go:334] "Generic (PLEG): container finished" podID="f095a204-cd5b-4081-a622-0563c573d034" containerID="3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f" exitCode=0 Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.059642 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-79s2q" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.059686 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79s2q" event={"ID":"f095a204-cd5b-4081-a622-0563c573d034","Type":"ContainerDied","Data":"3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f"} Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.059735 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79s2q" event={"ID":"f095a204-cd5b-4081-a622-0563c573d034","Type":"ContainerDied","Data":"25c886b2e4c3aef5f12358febb99fb45e876420cee8843c87d900f22b91b821e"} Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.059764 4863 scope.go:117] "RemoveContainer" containerID="3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.099411 4863 scope.go:117] "RemoveContainer" containerID="0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.108837 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-79s2q"] Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.118843 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-79s2q"] Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.130203 4863 scope.go:117] "RemoveContainer" containerID="6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.169713 4863 scope.go:117] "RemoveContainer" containerID="3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f" Dec 05 08:12:21 crc kubenswrapper[4863]: E1205 08:12:21.170317 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f\": container with ID starting with 3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f not found: ID does not exist" containerID="3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.170388 
4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f"} err="failed to get container status \"3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f\": rpc error: code = NotFound desc = could not find container \"3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f\": container with ID starting with 3af9bf3858ce58922e646af4d2b9ac4ab3121726e9cb46369085ced3a196ca0f not found: ID does not exist" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.170432 4863 scope.go:117] "RemoveContainer" containerID="0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11" Dec 05 08:12:21 crc kubenswrapper[4863]: E1205 08:12:21.170883 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11\": container with ID starting with 0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11 not found: ID does not exist" containerID="0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.170946 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11"} err="failed to get container status \"0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11\": rpc error: code = NotFound desc = could not find container \"0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11\": container with ID starting with 0489565d178ca4db61953c98e39cd34b3aab33d6ee8b2f9e35ce420a5ec5ed11 not found: ID does not exist" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.170981 4863 scope.go:117] "RemoveContainer" containerID="6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948" Dec 05 08:12:21 crc kubenswrapper[4863]: E1205 08:12:21.171314 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948\": container with ID starting with 6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948 not found: ID does not exist" containerID="6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.171376 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948"} err="failed to get container status \"6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948\": rpc error: code = NotFound desc = could not find container \"6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948\": container with ID starting with 6420471f56f70ee3048bf4935b7e0da420f2b056e10e8c2079e1fb6b89d4a948 not found: ID does not exist" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.444741 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.527756 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-scripts\") pod \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.528428 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pxq2\" (UniqueName: \"kubernetes.io/projected/d7107b12-f131-4b67-b3c2-34afcdc8dd67-kube-api-access-6pxq2\") pod \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.528557 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-combined-ca-bundle\") pod \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.528618 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-config-data\") pod \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.528645 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7107b12-f131-4b67-b3c2-34afcdc8dd67-logs\") pod \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\" (UID: \"d7107b12-f131-4b67-b3c2-34afcdc8dd67\") " Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.529465 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7107b12-f131-4b67-b3c2-34afcdc8dd67-logs" (OuterVolumeSpecName: "logs") pod "d7107b12-f131-4b67-b3c2-34afcdc8dd67" (UID: "d7107b12-f131-4b67-b3c2-34afcdc8dd67"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.540212 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7107b12-f131-4b67-b3c2-34afcdc8dd67-kube-api-access-6pxq2" (OuterVolumeSpecName: "kube-api-access-6pxq2") pod "d7107b12-f131-4b67-b3c2-34afcdc8dd67" (UID: "d7107b12-f131-4b67-b3c2-34afcdc8dd67"). InnerVolumeSpecName "kube-api-access-6pxq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.544656 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-scripts" (OuterVolumeSpecName: "scripts") pod "d7107b12-f131-4b67-b3c2-34afcdc8dd67" (UID: "d7107b12-f131-4b67-b3c2-34afcdc8dd67"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.553118 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7107b12-f131-4b67-b3c2-34afcdc8dd67" (UID: "d7107b12-f131-4b67-b3c2-34afcdc8dd67"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.559221 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-config-data" (OuterVolumeSpecName: "config-data") pod "d7107b12-f131-4b67-b3c2-34afcdc8dd67" (UID: "d7107b12-f131-4b67-b3c2-34afcdc8dd67"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.631369 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.631888 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pxq2\" (UniqueName: \"kubernetes.io/projected/d7107b12-f131-4b67-b3c2-34afcdc8dd67-kube-api-access-6pxq2\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.632075 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.632868 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7107b12-f131-4b67-b3c2-34afcdc8dd67-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:21 crc kubenswrapper[4863]: I1205 08:12:21.632968 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7107b12-f131-4b67-b3c2-34afcdc8dd67-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.079900 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mwppq" event={"ID":"d7107b12-f131-4b67-b3c2-34afcdc8dd67","Type":"ContainerDied","Data":"5fe096a03b5b69efc47aac50ed3ba581f0d5abc2fb29b9215dd2a3a7dce5525d"} Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.082057 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fe096a03b5b69efc47aac50ed3ba581f0d5abc2fb29b9215dd2a3a7dce5525d" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.080011 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-mwppq" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.158501 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5f7bc9655b-tknf7"] Dec 05 08:12:22 crc kubenswrapper[4863]: E1205 08:12:22.159131 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="registry-server" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.159156 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="registry-server" Dec 05 08:12:22 crc kubenswrapper[4863]: E1205 08:12:22.159184 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="extract-utilities" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.159195 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="extract-utilities" Dec 05 08:12:22 crc kubenswrapper[4863]: E1205 08:12:22.159214 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="extract-content" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.159221 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="extract-content" Dec 05 08:12:22 crc kubenswrapper[4863]: E1205 08:12:22.159230 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7107b12-f131-4b67-b3c2-34afcdc8dd67" containerName="placement-db-sync" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.159236 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7107b12-f131-4b67-b3c2-34afcdc8dd67" containerName="placement-db-sync" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.159437 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f095a204-cd5b-4081-a622-0563c573d034" containerName="registry-server" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.159468 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7107b12-f131-4b67-b3c2-34afcdc8dd67" containerName="placement-db-sync" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.160781 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.162435 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-m2htl" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.164159 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.167520 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.177576 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f7bc9655b-tknf7"] Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.345245 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-config-data\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.345364 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-combined-ca-bundle\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.345401 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-scripts\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.346503 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hg2p\" (UniqueName: \"kubernetes.io/projected/d552ffcf-15fc-41cb-9a16-632cccb1ca19-kube-api-access-2hg2p\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.346558 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d552ffcf-15fc-41cb-9a16-632cccb1ca19-logs\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.448982 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d552ffcf-15fc-41cb-9a16-632cccb1ca19-logs\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.449061 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hg2p\" (UniqueName: \"kubernetes.io/projected/d552ffcf-15fc-41cb-9a16-632cccb1ca19-kube-api-access-2hg2p\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 
08:12:22.449152 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-config-data\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.449214 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-combined-ca-bundle\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.449250 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-scripts\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.449548 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d552ffcf-15fc-41cb-9a16-632cccb1ca19-logs\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.454661 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-config-data\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.458648 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-combined-ca-bundle\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.460050 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d552ffcf-15fc-41cb-9a16-632cccb1ca19-scripts\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.472864 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hg2p\" (UniqueName: \"kubernetes.io/projected/d552ffcf-15fc-41cb-9a16-632cccb1ca19-kube-api-access-2hg2p\") pod \"placement-5f7bc9655b-tknf7\" (UID: \"d552ffcf-15fc-41cb-9a16-632cccb1ca19\") " pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.480897 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.615095 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f095a204-cd5b-4081-a622-0563c573d034" path="/var/lib/kubelet/pods/f095a204-cd5b-4081-a622-0563c573d034/volumes" Dec 05 08:12:22 crc kubenswrapper[4863]: I1205 08:12:22.927064 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f7bc9655b-tknf7"] Dec 05 08:12:23 crc kubenswrapper[4863]: I1205 08:12:23.091548 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f7bc9655b-tknf7" event={"ID":"d552ffcf-15fc-41cb-9a16-632cccb1ca19","Type":"ContainerStarted","Data":"7206f50fac867dcb8473d07793967d9412f0a44d31e96eb9b96be81867732edd"} Dec 05 08:12:23 crc kubenswrapper[4863]: I1205 08:12:23.619718 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:12:23 crc kubenswrapper[4863]: I1205 08:12:23.688547 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69fb7f47c9-p96k6"] Dec 05 08:12:23 crc kubenswrapper[4863]: I1205 08:12:23.688910 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" podUID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerName="dnsmasq-dns" containerID="cri-o://45fb708c2a9c45d69560c253f03ec7d2db3454fa73adbea056da3fac66acaa59" gracePeriod=10 Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.100602 4863 generic.go:334] "Generic (PLEG): container finished" podID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerID="45fb708c2a9c45d69560c253f03ec7d2db3454fa73adbea056da3fac66acaa59" exitCode=0 Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.100688 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" event={"ID":"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6","Type":"ContainerDied","Data":"45fb708c2a9c45d69560c253f03ec7d2db3454fa73adbea056da3fac66acaa59"} Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.100911 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" event={"ID":"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6","Type":"ContainerDied","Data":"5f5349f7f69ed3b9728379eb0a9ce7155ec36760d0b074712918d8f20839d473"} Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.100926 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f5349f7f69ed3b9728379eb0a9ce7155ec36760d0b074712918d8f20839d473" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.102354 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f7bc9655b-tknf7" event={"ID":"d552ffcf-15fc-41cb-9a16-632cccb1ca19","Type":"ContainerStarted","Data":"f2e693cf25d9bc13c734dbc20de474fc0fa59a109200982a6ef22081dad53f60"} Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.102377 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f7bc9655b-tknf7" event={"ID":"d552ffcf-15fc-41cb-9a16-632cccb1ca19","Type":"ContainerStarted","Data":"2d6cef200ec1e607751bcf6110e440c48046ba8d40b6778d0b0d61b310c3268d"} Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.102491 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.139171 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/placement-5f7bc9655b-tknf7" podStartSLOduration=2.139150378 podStartE2EDuration="2.139150378s" podCreationTimestamp="2025-12-05 08:12:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:12:24.121100529 +0000 UTC m=+5171.847097569" watchObservedRunningTime="2025-12-05 08:12:24.139150378 +0000 UTC m=+5171.865147418" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.180513 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.181269 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-config\") pod \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.181973 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-nb\") pod \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.182032 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-dns-svc\") pod \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.182114 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-sb\") pod \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.182165 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkdpg\" (UniqueName: \"kubernetes.io/projected/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-kube-api-access-jkdpg\") pod \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\" (UID: \"e9c8e782-8d86-41a8-9879-af3cb6fe9cc6\") " Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.193775 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-kube-api-access-jkdpg" (OuterVolumeSpecName: "kube-api-access-jkdpg") pod "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" (UID: "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6"). InnerVolumeSpecName "kube-api-access-jkdpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.281210 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-config" (OuterVolumeSpecName: "config") pod "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" (UID: "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.284378 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkdpg\" (UniqueName: \"kubernetes.io/projected/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-kube-api-access-jkdpg\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.284410 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.287981 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" (UID: "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.312016 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" (UID: "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.335202 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" (UID: "e9c8e782-8d86-41a8-9879-af3cb6fe9cc6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.385734 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.385769 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:24 crc kubenswrapper[4863]: I1205 08:12:24.385778 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:12:25 crc kubenswrapper[4863]: I1205 08:12:25.111490 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69fb7f47c9-p96k6" Dec 05 08:12:25 crc kubenswrapper[4863]: I1205 08:12:25.112150 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:25 crc kubenswrapper[4863]: I1205 08:12:25.136614 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69fb7f47c9-p96k6"] Dec 05 08:12:25 crc kubenswrapper[4863]: I1205 08:12:25.143454 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69fb7f47c9-p96k6"] Dec 05 08:12:26 crc kubenswrapper[4863]: I1205 08:12:26.611457 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" path="/var/lib/kubelet/pods/e9c8e782-8d86-41a8-9879-af3cb6fe9cc6/volumes" Dec 05 08:12:31 crc kubenswrapper[4863]: I1205 08:12:31.602781 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:12:31 crc kubenswrapper[4863]: E1205 08:12:31.603754 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:12:34 crc kubenswrapper[4863]: I1205 08:12:34.135725 4863 scope.go:117] "RemoveContainer" containerID="c07380f196ce9cd217ff1823b552142d6f226f6510917ab8b535c9a03e16dcef" Dec 05 08:12:44 crc kubenswrapper[4863]: I1205 08:12:44.601703 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:12:44 crc kubenswrapper[4863]: E1205 08:12:44.602414 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:12:53 crc kubenswrapper[4863]: I1205 08:12:53.695181 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:53 crc kubenswrapper[4863]: I1205 08:12:53.696033 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5f7bc9655b-tknf7" Dec 05 08:12:58 crc kubenswrapper[4863]: I1205 08:12:58.602289 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:12:58 crc kubenswrapper[4863]: E1205 08:12:58.602980 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:13:12 crc kubenswrapper[4863]: I1205 08:13:12.615117 4863 scope.go:117] "RemoveContainer" 
containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:13:12 crc kubenswrapper[4863]: E1205 08:13:12.616146 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.489363 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-2tshl"] Dec 05 08:13:15 crc kubenswrapper[4863]: E1205 08:13:15.489927 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerName="init" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.489939 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerName="init" Dec 05 08:13:15 crc kubenswrapper[4863]: E1205 08:13:15.489961 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerName="dnsmasq-dns" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.489967 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerName="dnsmasq-dns" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.490130 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9c8e782-8d86-41a8-9879-af3cb6fe9cc6" containerName="dnsmasq-dns" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.490652 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.517447 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2tshl"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.572129 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk2pz\" (UniqueName: \"kubernetes.io/projected/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-kube-api-access-wk2pz\") pod \"nova-api-db-create-2tshl\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.572256 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-operator-scripts\") pod \"nova-api-db-create-2tshl\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.583845 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-9xr4l"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.585268 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.591108 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-9xr4l"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.674118 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql7w6\" (UniqueName: \"kubernetes.io/projected/05f557d8-488b-401a-9827-6210361e103e-kube-api-access-ql7w6\") pod \"nova-cell0-db-create-9xr4l\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.674214 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-operator-scripts\") pod \"nova-api-db-create-2tshl\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.674250 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05f557d8-488b-401a-9827-6210361e103e-operator-scripts\") pod \"nova-cell0-db-create-9xr4l\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.674302 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk2pz\" (UniqueName: \"kubernetes.io/projected/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-kube-api-access-wk2pz\") pod \"nova-api-db-create-2tshl\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.675161 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-operator-scripts\") pod \"nova-api-db-create-2tshl\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.708195 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk2pz\" (UniqueName: \"kubernetes.io/projected/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-kube-api-access-wk2pz\") pod \"nova-api-db-create-2tshl\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.709683 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-07b0-account-create-update-x2zbm"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.710692 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.712681 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.719147 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-w7pv2"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.720181 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.745216 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-w7pv2"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.776584 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql7w6\" (UniqueName: \"kubernetes.io/projected/05f557d8-488b-401a-9827-6210361e103e-kube-api-access-ql7w6\") pod \"nova-cell0-db-create-9xr4l\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.776810 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22059a65-1c16-4835-af0c-791ed85bd701-operator-scripts\") pod \"nova-api-07b0-account-create-update-x2zbm\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.776926 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggmjc\" (UniqueName: \"kubernetes.io/projected/22059a65-1c16-4835-af0c-791ed85bd701-kube-api-access-ggmjc\") pod \"nova-api-07b0-account-create-update-x2zbm\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.777044 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm8q6\" (UniqueName: \"kubernetes.io/projected/c25172c1-e63a-450c-beea-bfe6097c1168-kube-api-access-sm8q6\") pod \"nova-cell1-db-create-w7pv2\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.777139 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05f557d8-488b-401a-9827-6210361e103e-operator-scripts\") pod \"nova-cell0-db-create-9xr4l\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.777238 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c25172c1-e63a-450c-beea-bfe6097c1168-operator-scripts\") pod \"nova-cell1-db-create-w7pv2\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.777920 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-07b0-account-create-update-x2zbm"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.778577 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05f557d8-488b-401a-9827-6210361e103e-operator-scripts\") pod \"nova-cell0-db-create-9xr4l\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.808743 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql7w6\" (UniqueName: \"kubernetes.io/projected/05f557d8-488b-401a-9827-6210361e103e-kube-api-access-ql7w6\") pod 
\"nova-cell0-db-create-9xr4l\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.811072 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.885743 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggmjc\" (UniqueName: \"kubernetes.io/projected/22059a65-1c16-4835-af0c-791ed85bd701-kube-api-access-ggmjc\") pod \"nova-api-07b0-account-create-update-x2zbm\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.886046 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm8q6\" (UniqueName: \"kubernetes.io/projected/c25172c1-e63a-450c-beea-bfe6097c1168-kube-api-access-sm8q6\") pod \"nova-cell1-db-create-w7pv2\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.886200 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c25172c1-e63a-450c-beea-bfe6097c1168-operator-scripts\") pod \"nova-cell1-db-create-w7pv2\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.886451 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22059a65-1c16-4835-af0c-791ed85bd701-operator-scripts\") pod \"nova-api-07b0-account-create-update-x2zbm\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.887507 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22059a65-1c16-4835-af0c-791ed85bd701-operator-scripts\") pod \"nova-api-07b0-account-create-update-x2zbm\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.887507 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c25172c1-e63a-450c-beea-bfe6097c1168-operator-scripts\") pod \"nova-cell1-db-create-w7pv2\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.895200 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-8a5f-account-create-update-jpqxj"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.896543 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.898438 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.900342 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.904766 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggmjc\" (UniqueName: \"kubernetes.io/projected/22059a65-1c16-4835-af0c-791ed85bd701-kube-api-access-ggmjc\") pod \"nova-api-07b0-account-create-update-x2zbm\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.915684 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8a5f-account-create-update-jpqxj"] Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.925107 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm8q6\" (UniqueName: \"kubernetes.io/projected/c25172c1-e63a-450c-beea-bfe6097c1168-kube-api-access-sm8q6\") pod \"nova-cell1-db-create-w7pv2\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.988484 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8596p\" (UniqueName: \"kubernetes.io/projected/894c218c-8862-4e20-b273-302d259e3964-kube-api-access-8596p\") pod \"nova-cell0-8a5f-account-create-update-jpqxj\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:15 crc kubenswrapper[4863]: I1205 08:13:15.989181 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/894c218c-8862-4e20-b273-302d259e3964-operator-scripts\") pod \"nova-cell0-8a5f-account-create-update-jpqxj\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.070797 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.076234 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.089506 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-8aba-account-create-update-vznb6"] Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.090667 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.090800 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/894c218c-8862-4e20-b273-302d259e3964-operator-scripts\") pod \"nova-cell0-8a5f-account-create-update-jpqxj\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.091001 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8596p\" (UniqueName: \"kubernetes.io/projected/894c218c-8862-4e20-b273-302d259e3964-kube-api-access-8596p\") pod \"nova-cell0-8a5f-account-create-update-jpqxj\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.091900 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/894c218c-8862-4e20-b273-302d259e3964-operator-scripts\") pod \"nova-cell0-8a5f-account-create-update-jpqxj\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.093923 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.098455 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8aba-account-create-update-vznb6"] Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.125387 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8596p\" (UniqueName: \"kubernetes.io/projected/894c218c-8862-4e20-b273-302d259e3964-kube-api-access-8596p\") pod \"nova-cell0-8a5f-account-create-update-jpqxj\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.193677 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssw5q\" (UniqueName: \"kubernetes.io/projected/7adaea6c-32b1-4fb5-898e-d735014b35ef-kube-api-access-ssw5q\") pod \"nova-cell1-8aba-account-create-update-vznb6\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.194086 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7adaea6c-32b1-4fb5-898e-d735014b35ef-operator-scripts\") pod \"nova-cell1-8aba-account-create-update-vznb6\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.282548 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.296632 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7adaea6c-32b1-4fb5-898e-d735014b35ef-operator-scripts\") pod \"nova-cell1-8aba-account-create-update-vznb6\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.297158 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssw5q\" (UniqueName: \"kubernetes.io/projected/7adaea6c-32b1-4fb5-898e-d735014b35ef-kube-api-access-ssw5q\") pod \"nova-cell1-8aba-account-create-update-vznb6\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.300035 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7adaea6c-32b1-4fb5-898e-d735014b35ef-operator-scripts\") pod \"nova-cell1-8aba-account-create-update-vznb6\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.316540 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2tshl"] Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.320524 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssw5q\" (UniqueName: \"kubernetes.io/projected/7adaea6c-32b1-4fb5-898e-d735014b35ef-kube-api-access-ssw5q\") pod \"nova-cell1-8aba-account-create-update-vznb6\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.351001 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-07b0-account-create-update-x2zbm"] Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.388884 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-9xr4l"] Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.411845 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.596373 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-w7pv2"] Dec 05 08:13:16 crc kubenswrapper[4863]: W1205 08:13:16.610349 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc25172c1_e63a_450c_beea_bfe6097c1168.slice/crio-b9850a65d1bcbe0175d2e9d6cd7c2761059e062f8535e83a309fdffe634975c7 WatchSource:0}: Error finding container b9850a65d1bcbe0175d2e9d6cd7c2761059e062f8535e83a309fdffe634975c7: Status 404 returned error can't find the container with id b9850a65d1bcbe0175d2e9d6cd7c2761059e062f8535e83a309fdffe634975c7 Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.697043 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2tshl" event={"ID":"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5","Type":"ContainerStarted","Data":"0d730b1858339edb2a3035434a38d8b8dcaa638acb8183a47bf84a4d4b2ee0dd"} Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.697086 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2tshl" event={"ID":"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5","Type":"ContainerStarted","Data":"5f64f3f18893e5b13c977cf6000b81f81eb8842f5a17d8ab32fb048c1bb2e501"} Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.699375 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-w7pv2" event={"ID":"c25172c1-e63a-450c-beea-bfe6097c1168","Type":"ContainerStarted","Data":"b9850a65d1bcbe0175d2e9d6cd7c2761059e062f8535e83a309fdffe634975c7"} Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.701465 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-07b0-account-create-update-x2zbm" event={"ID":"22059a65-1c16-4835-af0c-791ed85bd701","Type":"ContainerStarted","Data":"d9a46913d3b5a23f296e05cd9a538d8af277316c57dc1fcbc10f7782d873a45c"} Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.701526 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-07b0-account-create-update-x2zbm" event={"ID":"22059a65-1c16-4835-af0c-791ed85bd701","Type":"ContainerStarted","Data":"983154703af20313ede27aefd16fc909f82ed2239e781baa0605b0b29c476f66"} Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.704753 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-9xr4l" event={"ID":"05f557d8-488b-401a-9827-6210361e103e","Type":"ContainerStarted","Data":"0bca5de5abc083cd5717ec445b2b41e59a1efa151fcf1f73a71d5ca329c8b47e"} Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.704799 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-9xr4l" event={"ID":"05f557d8-488b-401a-9827-6210361e103e","Type":"ContainerStarted","Data":"3519ed21220f059070e322210a49a67df323550509b51f7197ef108f454c3daa"} Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.720188 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-2tshl" podStartSLOduration=1.720167719 podStartE2EDuration="1.720167719s" podCreationTimestamp="2025-12-05 08:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:16.718636282 +0000 UTC m=+5224.444633322" watchObservedRunningTime="2025-12-05 08:13:16.720167719 +0000 UTC 
m=+5224.446164759" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.749655 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-9xr4l" podStartSLOduration=1.749630534 podStartE2EDuration="1.749630534s" podCreationTimestamp="2025-12-05 08:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:16.73997049 +0000 UTC m=+5224.465967530" watchObservedRunningTime="2025-12-05 08:13:16.749630534 +0000 UTC m=+5224.475627584" Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.770966 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8a5f-account-create-update-jpqxj"] Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.771553 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-07b0-account-create-update-x2zbm" podStartSLOduration=1.771534546 podStartE2EDuration="1.771534546s" podCreationTimestamp="2025-12-05 08:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:16.752915914 +0000 UTC m=+5224.478912974" watchObservedRunningTime="2025-12-05 08:13:16.771534546 +0000 UTC m=+5224.497531606" Dec 05 08:13:16 crc kubenswrapper[4863]: W1205 08:13:16.802918 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894c218c_8862_4e20_b273_302d259e3964.slice/crio-c2487783d1efe85b57d9b7b3406781aae532c071f395054cdc494969d1c8a42d WatchSource:0}: Error finding container c2487783d1efe85b57d9b7b3406781aae532c071f395054cdc494969d1c8a42d: Status 404 returned error can't find the container with id c2487783d1efe85b57d9b7b3406781aae532c071f395054cdc494969d1c8a42d Dec 05 08:13:16 crc kubenswrapper[4863]: I1205 08:13:16.869363 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8aba-account-create-update-vznb6"] Dec 05 08:13:16 crc kubenswrapper[4863]: W1205 08:13:16.896709 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7adaea6c_32b1_4fb5_898e_d735014b35ef.slice/crio-12a7846960689ef7713702fab9bd307ff4f2ed5c5dc76d49891b9c17d22c1dd0 WatchSource:0}: Error finding container 12a7846960689ef7713702fab9bd307ff4f2ed5c5dc76d49891b9c17d22c1dd0: Status 404 returned error can't find the container with id 12a7846960689ef7713702fab9bd307ff4f2ed5c5dc76d49891b9c17d22c1dd0 Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.724166 4863 generic.go:334] "Generic (PLEG): container finished" podID="4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5" containerID="0d730b1858339edb2a3035434a38d8b8dcaa638acb8183a47bf84a4d4b2ee0dd" exitCode=0 Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.724648 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2tshl" event={"ID":"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5","Type":"ContainerDied","Data":"0d730b1858339edb2a3035434a38d8b8dcaa638acb8183a47bf84a4d4b2ee0dd"} Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.729358 4863 generic.go:334] "Generic (PLEG): container finished" podID="c25172c1-e63a-450c-beea-bfe6097c1168" containerID="c17d6f9870388d661caf3c9dd879347966564b06bddf7c4098ac043dea61482f" exitCode=0 Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.729602 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-db-create-w7pv2" event={"ID":"c25172c1-e63a-450c-beea-bfe6097c1168","Type":"ContainerDied","Data":"c17d6f9870388d661caf3c9dd879347966564b06bddf7c4098ac043dea61482f"} Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.733736 4863 generic.go:334] "Generic (PLEG): container finished" podID="22059a65-1c16-4835-af0c-791ed85bd701" containerID="d9a46913d3b5a23f296e05cd9a538d8af277316c57dc1fcbc10f7782d873a45c" exitCode=0 Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.733869 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-07b0-account-create-update-x2zbm" event={"ID":"22059a65-1c16-4835-af0c-791ed85bd701","Type":"ContainerDied","Data":"d9a46913d3b5a23f296e05cd9a538d8af277316c57dc1fcbc10f7782d873a45c"} Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.737749 4863 generic.go:334] "Generic (PLEG): container finished" podID="05f557d8-488b-401a-9827-6210361e103e" containerID="0bca5de5abc083cd5717ec445b2b41e59a1efa151fcf1f73a71d5ca329c8b47e" exitCode=0 Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.737888 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-9xr4l" event={"ID":"05f557d8-488b-401a-9827-6210361e103e","Type":"ContainerDied","Data":"0bca5de5abc083cd5717ec445b2b41e59a1efa151fcf1f73a71d5ca329c8b47e"} Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.741003 4863 generic.go:334] "Generic (PLEG): container finished" podID="894c218c-8862-4e20-b273-302d259e3964" containerID="74bb233a30890e9c078e9c47922ade2f0273e91f06c4c8fbf2a5de884ef66059" exitCode=0 Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.741077 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" event={"ID":"894c218c-8862-4e20-b273-302d259e3964","Type":"ContainerDied","Data":"74bb233a30890e9c078e9c47922ade2f0273e91f06c4c8fbf2a5de884ef66059"} Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.741134 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" event={"ID":"894c218c-8862-4e20-b273-302d259e3964","Type":"ContainerStarted","Data":"c2487783d1efe85b57d9b7b3406781aae532c071f395054cdc494969d1c8a42d"} Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.745523 4863 generic.go:334] "Generic (PLEG): container finished" podID="7adaea6c-32b1-4fb5-898e-d735014b35ef" containerID="df7d5da99c4df9017102581407ad900bb4158494a70e5dce32fc160578235232" exitCode=0 Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.745601 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8aba-account-create-update-vznb6" event={"ID":"7adaea6c-32b1-4fb5-898e-d735014b35ef","Type":"ContainerDied","Data":"df7d5da99c4df9017102581407ad900bb4158494a70e5dce32fc160578235232"} Dec 05 08:13:17 crc kubenswrapper[4863]: I1205 08:13:17.745648 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8aba-account-create-update-vznb6" event={"ID":"7adaea6c-32b1-4fb5-898e-d735014b35ef","Type":"ContainerStarted","Data":"12a7846960689ef7713702fab9bd307ff4f2ed5c5dc76d49891b9c17d22c1dd0"} Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.256062 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.328255 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggmjc\" (UniqueName: \"kubernetes.io/projected/22059a65-1c16-4835-af0c-791ed85bd701-kube-api-access-ggmjc\") pod \"22059a65-1c16-4835-af0c-791ed85bd701\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.328304 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22059a65-1c16-4835-af0c-791ed85bd701-operator-scripts\") pod \"22059a65-1c16-4835-af0c-791ed85bd701\" (UID: \"22059a65-1c16-4835-af0c-791ed85bd701\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.330186 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22059a65-1c16-4835-af0c-791ed85bd701-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "22059a65-1c16-4835-af0c-791ed85bd701" (UID: "22059a65-1c16-4835-af0c-791ed85bd701"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.338263 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22059a65-1c16-4835-af0c-791ed85bd701-kube-api-access-ggmjc" (OuterVolumeSpecName: "kube-api-access-ggmjc") pod "22059a65-1c16-4835-af0c-791ed85bd701" (UID: "22059a65-1c16-4835-af0c-791ed85bd701"). InnerVolumeSpecName "kube-api-access-ggmjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.388136 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.394076 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.398660 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.410322 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.412424 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431262 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-operator-scripts\") pod \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431307 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ql7w6\" (UniqueName: \"kubernetes.io/projected/05f557d8-488b-401a-9827-6210361e103e-kube-api-access-ql7w6\") pod \"05f557d8-488b-401a-9827-6210361e103e\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431353 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8596p\" (UniqueName: \"kubernetes.io/projected/894c218c-8862-4e20-b273-302d259e3964-kube-api-access-8596p\") pod \"894c218c-8862-4e20-b273-302d259e3964\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431381 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/894c218c-8862-4e20-b273-302d259e3964-operator-scripts\") pod \"894c218c-8862-4e20-b273-302d259e3964\" (UID: \"894c218c-8862-4e20-b273-302d259e3964\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431403 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssw5q\" (UniqueName: \"kubernetes.io/projected/7adaea6c-32b1-4fb5-898e-d735014b35ef-kube-api-access-ssw5q\") pod \"7adaea6c-32b1-4fb5-898e-d735014b35ef\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431437 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk2pz\" (UniqueName: \"kubernetes.io/projected/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-kube-api-access-wk2pz\") pod \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\" (UID: \"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431535 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7adaea6c-32b1-4fb5-898e-d735014b35ef-operator-scripts\") pod \"7adaea6c-32b1-4fb5-898e-d735014b35ef\" (UID: \"7adaea6c-32b1-4fb5-898e-d735014b35ef\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431589 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm8q6\" (UniqueName: \"kubernetes.io/projected/c25172c1-e63a-450c-beea-bfe6097c1168-kube-api-access-sm8q6\") pod \"c25172c1-e63a-450c-beea-bfe6097c1168\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431621 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c25172c1-e63a-450c-beea-bfe6097c1168-operator-scripts\") pod \"c25172c1-e63a-450c-beea-bfe6097c1168\" (UID: \"c25172c1-e63a-450c-beea-bfe6097c1168\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.431662 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/05f557d8-488b-401a-9827-6210361e103e-operator-scripts\") pod \"05f557d8-488b-401a-9827-6210361e103e\" (UID: \"05f557d8-488b-401a-9827-6210361e103e\") " Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.432674 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggmjc\" (UniqueName: \"kubernetes.io/projected/22059a65-1c16-4835-af0c-791ed85bd701-kube-api-access-ggmjc\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.432696 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22059a65-1c16-4835-af0c-791ed85bd701-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.433075 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05f557d8-488b-401a-9827-6210361e103e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05f557d8-488b-401a-9827-6210361e103e" (UID: "05f557d8-488b-401a-9827-6210361e103e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.436806 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-kube-api-access-wk2pz" (OuterVolumeSpecName: "kube-api-access-wk2pz") pod "4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5" (UID: "4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5"). InnerVolumeSpecName "kube-api-access-wk2pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.437082 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7adaea6c-32b1-4fb5-898e-d735014b35ef-kube-api-access-ssw5q" (OuterVolumeSpecName: "kube-api-access-ssw5q") pod "7adaea6c-32b1-4fb5-898e-d735014b35ef" (UID: "7adaea6c-32b1-4fb5-898e-d735014b35ef"). InnerVolumeSpecName "kube-api-access-ssw5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.437272 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7adaea6c-32b1-4fb5-898e-d735014b35ef-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7adaea6c-32b1-4fb5-898e-d735014b35ef" (UID: "7adaea6c-32b1-4fb5-898e-d735014b35ef"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.437603 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05f557d8-488b-401a-9827-6210361e103e-kube-api-access-ql7w6" (OuterVolumeSpecName: "kube-api-access-ql7w6") pod "05f557d8-488b-401a-9827-6210361e103e" (UID: "05f557d8-488b-401a-9827-6210361e103e"). InnerVolumeSpecName "kube-api-access-ql7w6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.438138 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5" (UID: "4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.438521 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/894c218c-8862-4e20-b273-302d259e3964-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "894c218c-8862-4e20-b273-302d259e3964" (UID: "894c218c-8862-4e20-b273-302d259e3964"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.438619 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c25172c1-e63a-450c-beea-bfe6097c1168-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c25172c1-e63a-450c-beea-bfe6097c1168" (UID: "c25172c1-e63a-450c-beea-bfe6097c1168"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.446252 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c25172c1-e63a-450c-beea-bfe6097c1168-kube-api-access-sm8q6" (OuterVolumeSpecName: "kube-api-access-sm8q6") pod "c25172c1-e63a-450c-beea-bfe6097c1168" (UID: "c25172c1-e63a-450c-beea-bfe6097c1168"). InnerVolumeSpecName "kube-api-access-sm8q6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.446387 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894c218c-8862-4e20-b273-302d259e3964-kube-api-access-8596p" (OuterVolumeSpecName: "kube-api-access-8596p") pod "894c218c-8862-4e20-b273-302d259e3964" (UID: "894c218c-8862-4e20-b273-302d259e3964"). InnerVolumeSpecName "kube-api-access-8596p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534698 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk2pz\" (UniqueName: \"kubernetes.io/projected/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-kube-api-access-wk2pz\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534735 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7adaea6c-32b1-4fb5-898e-d735014b35ef-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534746 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm8q6\" (UniqueName: \"kubernetes.io/projected/c25172c1-e63a-450c-beea-bfe6097c1168-kube-api-access-sm8q6\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534755 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c25172c1-e63a-450c-beea-bfe6097c1168-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534765 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05f557d8-488b-401a-9827-6210361e103e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534773 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534782 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ql7w6\" (UniqueName: \"kubernetes.io/projected/05f557d8-488b-401a-9827-6210361e103e-kube-api-access-ql7w6\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534791 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8596p\" (UniqueName: \"kubernetes.io/projected/894c218c-8862-4e20-b273-302d259e3964-kube-api-access-8596p\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534800 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/894c218c-8862-4e20-b273-302d259e3964-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.534808 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssw5q\" (UniqueName: \"kubernetes.io/projected/7adaea6c-32b1-4fb5-898e-d735014b35ef-kube-api-access-ssw5q\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.788723 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2tshl" event={"ID":"4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5","Type":"ContainerDied","Data":"5f64f3f18893e5b13c977cf6000b81f81eb8842f5a17d8ab32fb048c1bb2e501"} Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.788770 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f64f3f18893e5b13c977cf6000b81f81eb8842f5a17d8ab32fb048c1bb2e501" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.788844 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-2tshl" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.792539 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-w7pv2" event={"ID":"c25172c1-e63a-450c-beea-bfe6097c1168","Type":"ContainerDied","Data":"b9850a65d1bcbe0175d2e9d6cd7c2761059e062f8535e83a309fdffe634975c7"} Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.792585 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9850a65d1bcbe0175d2e9d6cd7c2761059e062f8535e83a309fdffe634975c7" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.792653 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-w7pv2" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.798154 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-07b0-account-create-update-x2zbm" event={"ID":"22059a65-1c16-4835-af0c-791ed85bd701","Type":"ContainerDied","Data":"983154703af20313ede27aefd16fc909f82ed2239e781baa0605b0b29c476f66"} Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.798216 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="983154703af20313ede27aefd16fc909f82ed2239e781baa0605b0b29c476f66" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.798173 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-07b0-account-create-update-x2zbm" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.801591 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-9xr4l" event={"ID":"05f557d8-488b-401a-9827-6210361e103e","Type":"ContainerDied","Data":"3519ed21220f059070e322210a49a67df323550509b51f7197ef108f454c3daa"} Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.801631 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3519ed21220f059070e322210a49a67df323550509b51f7197ef108f454c3daa" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.801691 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-9xr4l" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.807987 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" event={"ID":"894c218c-8862-4e20-b273-302d259e3964","Type":"ContainerDied","Data":"c2487783d1efe85b57d9b7b3406781aae532c071f395054cdc494969d1c8a42d"} Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.808036 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2487783d1efe85b57d9b7b3406781aae532c071f395054cdc494969d1c8a42d" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.808113 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-8a5f-account-create-update-jpqxj" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.811088 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8aba-account-create-update-vznb6" event={"ID":"7adaea6c-32b1-4fb5-898e-d735014b35ef","Type":"ContainerDied","Data":"12a7846960689ef7713702fab9bd307ff4f2ed5c5dc76d49891b9c17d22c1dd0"} Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.811128 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12a7846960689ef7713702fab9bd307ff4f2ed5c5dc76d49891b9c17d22c1dd0" Dec 05 08:13:19 crc kubenswrapper[4863]: I1205 08:13:19.811176 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8aba-account-create-update-vznb6" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.067468 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-czmxf"] Dec 05 08:13:21 crc kubenswrapper[4863]: E1205 08:13:21.068164 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22059a65-1c16-4835-af0c-791ed85bd701" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068180 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="22059a65-1c16-4835-af0c-791ed85bd701" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: E1205 08:13:21.068195 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05f557d8-488b-401a-9827-6210361e103e" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068205 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="05f557d8-488b-401a-9827-6210361e103e" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: E1205 08:13:21.068219 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7adaea6c-32b1-4fb5-898e-d735014b35ef" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068227 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="7adaea6c-32b1-4fb5-898e-d735014b35ef" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: E1205 08:13:21.068248 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c25172c1-e63a-450c-beea-bfe6097c1168" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068256 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c25172c1-e63a-450c-beea-bfe6097c1168" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: E1205 08:13:21.068271 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068279 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: E1205 08:13:21.068292 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894c218c-8862-4e20-b273-302d259e3964" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068300 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="894c218c-8862-4e20-b273-302d259e3964" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc 
kubenswrapper[4863]: I1205 08:13:21.068538 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="7adaea6c-32b1-4fb5-898e-d735014b35ef" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068563 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c25172c1-e63a-450c-beea-bfe6097c1168" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068571 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="22059a65-1c16-4835-af0c-791ed85bd701" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068586 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="894c218c-8862-4e20-b273-302d259e3964" containerName="mariadb-account-create-update" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068596 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="05f557d8-488b-401a-9827-6210361e103e" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.068609 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5" containerName="mariadb-database-create" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.069387 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.073968 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-jnb5t" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.075059 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.078767 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-czmxf"] Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.083033 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.265949 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-config-data\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.266095 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.266181 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcfkq\" (UniqueName: \"kubernetes.io/projected/236039b4-23cb-4694-a2f2-e6c5c8b10215-kube-api-access-fcfkq\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.266259 4863 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-scripts\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.367928 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.368029 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcfkq\" (UniqueName: \"kubernetes.io/projected/236039b4-23cb-4694-a2f2-e6c5c8b10215-kube-api-access-fcfkq\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.368111 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-scripts\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.368154 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-config-data\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.374203 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.374304 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-config-data\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.379192 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-scripts\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.395165 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcfkq\" (UniqueName: \"kubernetes.io/projected/236039b4-23cb-4694-a2f2-e6c5c8b10215-kube-api-access-fcfkq\") pod \"nova-cell0-conductor-db-sync-czmxf\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:21 crc kubenswrapper[4863]: I1205 08:13:21.687528 4863 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:22 crc kubenswrapper[4863]: I1205 08:13:22.232383 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-czmxf"] Dec 05 08:13:22 crc kubenswrapper[4863]: I1205 08:13:22.839992 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-czmxf" event={"ID":"236039b4-23cb-4694-a2f2-e6c5c8b10215","Type":"ContainerStarted","Data":"b276fab7974e9f9a32a74b5b10a11cc37372900553810b462f605a9926ff439a"} Dec 05 08:13:23 crc kubenswrapper[4863]: I1205 08:13:23.602502 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:13:23 crc kubenswrapper[4863]: E1205 08:13:23.603024 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:13:30 crc kubenswrapper[4863]: I1205 08:13:30.906303 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-czmxf" event={"ID":"236039b4-23cb-4694-a2f2-e6c5c8b10215","Type":"ContainerStarted","Data":"66e1931c38ce932f4eb9f0ce039af457f10b365d12498bb36ec54baf2a1d54e9"} Dec 05 08:13:30 crc kubenswrapper[4863]: I1205 08:13:30.940269 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-czmxf" podStartSLOduration=1.869717435 podStartE2EDuration="9.94025018s" podCreationTimestamp="2025-12-05 08:13:21 +0000 UTC" firstStartedPulling="2025-12-05 08:13:22.237145135 +0000 UTC m=+5229.963142185" lastFinishedPulling="2025-12-05 08:13:30.30767788 +0000 UTC m=+5238.033674930" observedRunningTime="2025-12-05 08:13:30.931963968 +0000 UTC m=+5238.657961018" watchObservedRunningTime="2025-12-05 08:13:30.94025018 +0000 UTC m=+5238.666247230" Dec 05 08:13:34 crc kubenswrapper[4863]: I1205 08:13:34.271030 4863 scope.go:117] "RemoveContainer" containerID="dc2f8df4d8c17393318e5e4ec239fc45d00895a000edf10aad536c08a1ce8858" Dec 05 08:13:35 crc kubenswrapper[4863]: I1205 08:13:35.977532 4863 generic.go:334] "Generic (PLEG): container finished" podID="236039b4-23cb-4694-a2f2-e6c5c8b10215" containerID="66e1931c38ce932f4eb9f0ce039af457f10b365d12498bb36ec54baf2a1d54e9" exitCode=0 Dec 05 08:13:35 crc kubenswrapper[4863]: I1205 08:13:35.977642 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-czmxf" event={"ID":"236039b4-23cb-4694-a2f2-e6c5c8b10215","Type":"ContainerDied","Data":"66e1931c38ce932f4eb9f0ce039af457f10b365d12498bb36ec54baf2a1d54e9"} Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.356816 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.483211 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-combined-ca-bundle\") pod \"236039b4-23cb-4694-a2f2-e6c5c8b10215\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.483457 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-config-data\") pod \"236039b4-23cb-4694-a2f2-e6c5c8b10215\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.483554 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-scripts\") pod \"236039b4-23cb-4694-a2f2-e6c5c8b10215\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.483588 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcfkq\" (UniqueName: \"kubernetes.io/projected/236039b4-23cb-4694-a2f2-e6c5c8b10215-kube-api-access-fcfkq\") pod \"236039b4-23cb-4694-a2f2-e6c5c8b10215\" (UID: \"236039b4-23cb-4694-a2f2-e6c5c8b10215\") " Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.489145 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-scripts" (OuterVolumeSpecName: "scripts") pod "236039b4-23cb-4694-a2f2-e6c5c8b10215" (UID: "236039b4-23cb-4694-a2f2-e6c5c8b10215"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.489649 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/236039b4-23cb-4694-a2f2-e6c5c8b10215-kube-api-access-fcfkq" (OuterVolumeSpecName: "kube-api-access-fcfkq") pod "236039b4-23cb-4694-a2f2-e6c5c8b10215" (UID: "236039b4-23cb-4694-a2f2-e6c5c8b10215"). InnerVolumeSpecName "kube-api-access-fcfkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.515161 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "236039b4-23cb-4694-a2f2-e6c5c8b10215" (UID: "236039b4-23cb-4694-a2f2-e6c5c8b10215"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.516368 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-config-data" (OuterVolumeSpecName: "config-data") pod "236039b4-23cb-4694-a2f2-e6c5c8b10215" (UID: "236039b4-23cb-4694-a2f2-e6c5c8b10215"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.585545 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.585579 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.585589 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/236039b4-23cb-4694-a2f2-e6c5c8b10215-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.585597 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcfkq\" (UniqueName: \"kubernetes.io/projected/236039b4-23cb-4694-a2f2-e6c5c8b10215-kube-api-access-fcfkq\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:37 crc kubenswrapper[4863]: I1205 08:13:37.601748 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:13:37 crc kubenswrapper[4863]: E1205 08:13:37.602094 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.019897 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-czmxf" event={"ID":"236039b4-23cb-4694-a2f2-e6c5c8b10215","Type":"ContainerDied","Data":"b276fab7974e9f9a32a74b5b10a11cc37372900553810b462f605a9926ff439a"} Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.019937 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b276fab7974e9f9a32a74b5b10a11cc37372900553810b462f605a9926ff439a" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.019988 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-czmxf" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.127047 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:13:38 crc kubenswrapper[4863]: E1205 08:13:38.127580 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236039b4-23cb-4694-a2f2-e6c5c8b10215" containerName="nova-cell0-conductor-db-sync" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.127601 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="236039b4-23cb-4694-a2f2-e6c5c8b10215" containerName="nova-cell0-conductor-db-sync" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.127849 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="236039b4-23cb-4694-a2f2-e6c5c8b10215" containerName="nova-cell0-conductor-db-sync" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.128612 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.131719 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.131903 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-jnb5t" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.137210 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.305716 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.306104 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7bm8\" (UniqueName: \"kubernetes.io/projected/969424ae-7de9-4f26-b3c5-c7e50563e32a-kube-api-access-p7bm8\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.306162 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.407417 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.407480 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7bm8\" (UniqueName: \"kubernetes.io/projected/969424ae-7de9-4f26-b3c5-c7e50563e32a-kube-api-access-p7bm8\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.407535 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.420778 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.420956 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.425576 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7bm8\" (UniqueName: \"kubernetes.io/projected/969424ae-7de9-4f26-b3c5-c7e50563e32a-kube-api-access-p7bm8\") pod \"nova-cell0-conductor-0\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.461499 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:38 crc kubenswrapper[4863]: I1205 08:13:38.929278 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:13:39 crc kubenswrapper[4863]: I1205 08:13:39.027489 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"969424ae-7de9-4f26-b3c5-c7e50563e32a","Type":"ContainerStarted","Data":"28f1c3ca7de0d15ea7f98869d746af935e23efb602eda296c67fa5e7eeb8368d"} Dec 05 08:13:40 crc kubenswrapper[4863]: I1205 08:13:40.038818 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"969424ae-7de9-4f26-b3c5-c7e50563e32a","Type":"ContainerStarted","Data":"0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb"} Dec 05 08:13:40 crc kubenswrapper[4863]: I1205 08:13:40.039167 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:40 crc kubenswrapper[4863]: I1205 08:13:40.068678 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.068654864 podStartE2EDuration="2.068654864s" podCreationTimestamp="2025-12-05 08:13:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:40.066393899 +0000 UTC m=+5247.792390999" watchObservedRunningTime="2025-12-05 08:13:40.068654864 +0000 UTC m=+5247.794651904" Dec 05 08:13:48 crc kubenswrapper[4863]: I1205 08:13:48.497646 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 08:13:48 crc kubenswrapper[4863]: I1205 08:13:48.962714 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-trgdq"] Dec 05 08:13:48 crc kubenswrapper[4863]: I1205 08:13:48.964164 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:48 crc kubenswrapper[4863]: I1205 08:13:48.967775 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 08:13:48 crc kubenswrapper[4863]: I1205 08:13:48.974015 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 08:13:48 crc kubenswrapper[4863]: I1205 08:13:48.978932 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-trgdq"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.134015 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrfh7\" (UniqueName: \"kubernetes.io/projected/e60035b0-2cb5-4329-973f-8ff053e9a3b3-kube-api-access-zrfh7\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.134069 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-scripts\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.134102 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.134179 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-config-data\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.138851 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.140899 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.142585 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.160427 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.222712 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.224307 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.233211 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.236146 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-config-data\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.236272 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrfh7\" (UniqueName: \"kubernetes.io/projected/e60035b0-2cb5-4329-973f-8ff053e9a3b3-kube-api-access-zrfh7\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.236307 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-scripts\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.236341 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.242622 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.244063 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-config-data\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.246826 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-scripts\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.247744 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.272901 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrfh7\" (UniqueName: \"kubernetes.io/projected/e60035b0-2cb5-4329-973f-8ff053e9a3b3-kube-api-access-zrfh7\") pod \"nova-cell0-cell-mapping-trgdq\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.286528 4863 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.287696 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.289056 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.300501 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.311717 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.372662 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.372981 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.373128 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e94393-5304-4608-860a-ca1a1f0f20eb-logs\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.373317 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-794s4\" (UniqueName: \"kubernetes.io/projected/a1c654e7-bbeb-4c16-aa26-243de5dfe419-kube-api-access-794s4\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.373438 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-config-data\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.373619 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.373776 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8qzk\" (UniqueName: \"kubernetes.io/projected/e6d0ac29-808b-4e66-b27e-2c04143e5191-kube-api-access-p8qzk\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.373952 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-config-data\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.374111 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n5pc\" (UniqueName: \"kubernetes.io/projected/d5e94393-5304-4608-860a-ca1a1f0f20eb-kube-api-access-5n5pc\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.374300 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-config-data\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.374454 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6d0ac29-808b-4e66-b27e-2c04143e5191-logs\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477392 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477443 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477481 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e94393-5304-4608-860a-ca1a1f0f20eb-logs\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477523 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-794s4\" (UniqueName: \"kubernetes.io/projected/a1c654e7-bbeb-4c16-aa26-243de5dfe419-kube-api-access-794s4\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477538 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-config-data\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477569 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477605 
4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8qzk\" (UniqueName: \"kubernetes.io/projected/e6d0ac29-808b-4e66-b27e-2c04143e5191-kube-api-access-p8qzk\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477637 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-config-data\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477666 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n5pc\" (UniqueName: \"kubernetes.io/projected/d5e94393-5304-4608-860a-ca1a1f0f20eb-kube-api-access-5n5pc\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477703 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-config-data\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.477734 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6d0ac29-808b-4e66-b27e-2c04143e5191-logs\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.478205 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6d0ac29-808b-4e66-b27e-2c04143e5191-logs\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.478582 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e94393-5304-4608-860a-ca1a1f0f20eb-logs\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.499733 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.503937 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.504269 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-config-data\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.505062 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-config-data\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.506846 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-794s4\" (UniqueName: \"kubernetes.io/projected/a1c654e7-bbeb-4c16-aa26-243de5dfe419-kube-api-access-794s4\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.510580 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.518513 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.519194 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8qzk\" (UniqueName: \"kubernetes.io/projected/e6d0ac29-808b-4e66-b27e-2c04143e5191-kube-api-access-p8qzk\") pod \"nova-metadata-0\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.522795 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.525954 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-config-data\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.528250 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.529179 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n5pc\" (UniqueName: \"kubernetes.io/projected/d5e94393-5304-4608-860a-ca1a1f0f20eb-kube-api-access-5n5pc\") pod \"nova-api-0\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.581136 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.594617 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bdcdccdd9-p49k6"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.597039 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.606907 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bdcdccdd9-p49k6"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.681867 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dfv8\" (UniqueName: \"kubernetes.io/projected/a3ecd8f0-9470-4b55-8657-2a1aba539e56-kube-api-access-4dfv8\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.682187 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-nb\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.682310 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-config\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.682420 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-dns-svc\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.682580 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-sb\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.682675 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq4ws\" (UniqueName: \"kubernetes.io/projected/5d7f256d-7571-494f-87d9-8e750ca22ba9-kube-api-access-rq4ws\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.682788 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.682910 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.710973 4863 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.772705 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.772776 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784651 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-sb\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784701 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq4ws\" (UniqueName: \"kubernetes.io/projected/5d7f256d-7571-494f-87d9-8e750ca22ba9-kube-api-access-rq4ws\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784753 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784768 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784818 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dfv8\" (UniqueName: \"kubernetes.io/projected/a3ecd8f0-9470-4b55-8657-2a1aba539e56-kube-api-access-4dfv8\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784845 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-nb\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784873 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-config\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.784898 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-dns-svc\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 
08:13:49.785832 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-dns-svc\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.786128 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-config\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.786422 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-sb\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.786842 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-nb\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.788863 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.789113 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.815371 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dfv8\" (UniqueName: \"kubernetes.io/projected/a3ecd8f0-9470-4b55-8657-2a1aba539e56-kube-api-access-4dfv8\") pod \"nova-cell1-novncproxy-0\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.824482 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq4ws\" (UniqueName: \"kubernetes.io/projected/5d7f256d-7571-494f-87d9-8e750ca22ba9-kube-api-access-rq4ws\") pod \"dnsmasq-dns-5bdcdccdd9-p49k6\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.890633 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.936151 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.944256 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vbsxf"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.945492 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.949193 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.950515 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.974169 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vbsxf"] Dec 05 08:13:49 crc kubenswrapper[4863]: I1205 08:13:49.982613 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-trgdq"] Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.091900 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.091944 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-config-data\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.091969 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-scripts\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.092126 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5hxd\" (UniqueName: \"kubernetes.io/projected/9f3a7e74-9e74-480a-9232-a7b789c218d6-kube-api-access-c5hxd\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.174258 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-trgdq" event={"ID":"e60035b0-2cb5-4329-973f-8ff053e9a3b3","Type":"ContainerStarted","Data":"d0f2744964eb39cbced247c4e45d638c851588a1f702a8031e46f85956d220a5"} Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.174827 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-trgdq" event={"ID":"e60035b0-2cb5-4329-973f-8ff053e9a3b3","Type":"ContainerStarted","Data":"a50c01e8cc60f5c1765c7ddce035b56cb0b069d7ec87a096aabb1f57b2b68265"} Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.197118 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.197159 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-config-data\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.197183 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-scripts\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.197253 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5hxd\" (UniqueName: \"kubernetes.io/projected/9f3a7e74-9e74-480a-9232-a7b789c218d6-kube-api-access-c5hxd\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.201199 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.202953 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-config-data\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.206936 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-scripts\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.209855 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-trgdq" podStartSLOduration=2.209808563 podStartE2EDuration="2.209808563s" podCreationTimestamp="2025-12-05 08:13:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:50.196158351 +0000 UTC m=+5257.922155391" watchObservedRunningTime="2025-12-05 08:13:50.209808563 +0000 UTC m=+5257.935805613" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.214627 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5hxd\" (UniqueName: \"kubernetes.io/projected/9f3a7e74-9e74-480a-9232-a7b789c218d6-kube-api-access-c5hxd\") pod \"nova-cell1-conductor-db-sync-vbsxf\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " 
pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.249720 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.271590 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.383833 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.449754 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:13:50 crc kubenswrapper[4863]: W1205 08:13:50.470228 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3ecd8f0_9470_4b55_8657_2a1aba539e56.slice/crio-a41c2f3e510e1c4d2bd1e64ec0ad4b4e543c107a07ff6223e2078316b4a4909b WatchSource:0}: Error finding container a41c2f3e510e1c4d2bd1e64ec0ad4b4e543c107a07ff6223e2078316b4a4909b: Status 404 returned error can't find the container with id a41c2f3e510e1c4d2bd1e64ec0ad4b4e543c107a07ff6223e2078316b4a4909b Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.487330 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:13:50 crc kubenswrapper[4863]: W1205 08:13:50.495055 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1c654e7_bbeb_4c16_aa26_243de5dfe419.slice/crio-f5dbd0c05351fc7c95f7e11dee1ba36c932d6fb509f9c9ae88a44d35fe880e80 WatchSource:0}: Error finding container f5dbd0c05351fc7c95f7e11dee1ba36c932d6fb509f9c9ae88a44d35fe880e80: Status 404 returned error can't find the container with id f5dbd0c05351fc7c95f7e11dee1ba36c932d6fb509f9c9ae88a44d35fe880e80 Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.580993 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bdcdccdd9-p49k6"] Dec 05 08:13:50 crc kubenswrapper[4863]: W1205 08:13:50.590228 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d7f256d_7571_494f_87d9_8e750ca22ba9.slice/crio-2c01240d1cf36ad09a43b868c6dacedb428b58f8d044108693183d5f4da5dbfb WatchSource:0}: Error finding container 2c01240d1cf36ad09a43b868c6dacedb428b58f8d044108693183d5f4da5dbfb: Status 404 returned error can't find the container with id 2c01240d1cf36ad09a43b868c6dacedb428b58f8d044108693183d5f4da5dbfb Dec 05 08:13:50 crc kubenswrapper[4863]: I1205 08:13:50.743913 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vbsxf"] Dec 05 08:13:50 crc kubenswrapper[4863]: W1205 08:13:50.760237 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f3a7e74_9e74_480a_9232_a7b789c218d6.slice/crio-cb8332a40d302fffd0c33f8d57a2beb9e3455b2f4c3bd94b5120eb6cd6c4a4b7 WatchSource:0}: Error finding container cb8332a40d302fffd0c33f8d57a2beb9e3455b2f4c3bd94b5120eb6cd6c4a4b7: Status 404 returned error can't find the container with id cb8332a40d302fffd0c33f8d57a2beb9e3455b2f4c3bd94b5120eb6cd6c4a4b7 Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.184970 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"d5e94393-5304-4608-860a-ca1a1f0f20eb","Type":"ContainerStarted","Data":"57df2569352e99a9328c9a3e13e523a2a7c9c777fc0e787dc6d5bb8bb996b8ee"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.186613 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a1c654e7-bbeb-4c16-aa26-243de5dfe419","Type":"ContainerStarted","Data":"f5dbd0c05351fc7c95f7e11dee1ba36c932d6fb509f9c9ae88a44d35fe880e80"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.189362 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" event={"ID":"9f3a7e74-9e74-480a-9232-a7b789c218d6","Type":"ContainerStarted","Data":"dc3696c23e735e4d10e0f7dfc5ec4c84c16b7dad66024a5a1d121806458d9b56"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.189396 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" event={"ID":"9f3a7e74-9e74-480a-9232-a7b789c218d6","Type":"ContainerStarted","Data":"cb8332a40d302fffd0c33f8d57a2beb9e3455b2f4c3bd94b5120eb6cd6c4a4b7"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.197581 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a3ecd8f0-9470-4b55-8657-2a1aba539e56","Type":"ContainerStarted","Data":"a41c2f3e510e1c4d2bd1e64ec0ad4b4e543c107a07ff6223e2078316b4a4909b"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.199801 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e6d0ac29-808b-4e66-b27e-2c04143e5191","Type":"ContainerStarted","Data":"889a6b5825f71da3a91f95e4cbf5a820d18dd2c845966f5c800f8d5c32470bb4"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.202659 4863 generic.go:334] "Generic (PLEG): container finished" podID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerID="a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421" exitCode=0 Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.204095 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" event={"ID":"5d7f256d-7571-494f-87d9-8e750ca22ba9","Type":"ContainerDied","Data":"a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.204116 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" event={"ID":"5d7f256d-7571-494f-87d9-8e750ca22ba9","Type":"ContainerStarted","Data":"2c01240d1cf36ad09a43b868c6dacedb428b58f8d044108693183d5f4da5dbfb"} Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.211178 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" podStartSLOduration=2.211160549 podStartE2EDuration="2.211160549s" podCreationTimestamp="2025-12-05 08:13:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:51.209220993 +0000 UTC m=+5258.935218033" watchObservedRunningTime="2025-12-05 08:13:51.211160549 +0000 UTC m=+5258.937157589" Dec 05 08:13:51 crc kubenswrapper[4863]: I1205 08:13:51.601597 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:13:51 crc kubenswrapper[4863]: E1205 08:13:51.603201 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.220984 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a3ecd8f0-9470-4b55-8657-2a1aba539e56","Type":"ContainerStarted","Data":"5772bcb2a3e0c7889d635921717b3d759f58af26ee5d45aee8d159462ca3cc5e"} Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.223384 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e6d0ac29-808b-4e66-b27e-2c04143e5191","Type":"ContainerStarted","Data":"893bc0ac01202a5a6adee5cc3642e5edee81bda4d50794f5b852f56aca03e66c"} Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.228137 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" event={"ID":"5d7f256d-7571-494f-87d9-8e750ca22ba9","Type":"ContainerStarted","Data":"f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8"} Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.228187 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.229459 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d5e94393-5304-4608-860a-ca1a1f0f20eb","Type":"ContainerStarted","Data":"f9d14acf333acadf951937977852bfc679f69236975dabcf5387fbbf982855e5"} Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.231233 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a1c654e7-bbeb-4c16-aa26-243de5dfe419","Type":"ContainerStarted","Data":"f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1"} Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.248061 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.966990065 podStartE2EDuration="4.248032813s" podCreationTimestamp="2025-12-05 08:13:49 +0000 UTC" firstStartedPulling="2025-12-05 08:13:50.479943287 +0000 UTC m=+5258.205940337" lastFinishedPulling="2025-12-05 08:13:52.760986045 +0000 UTC m=+5260.486983085" observedRunningTime="2025-12-05 08:13:53.236965824 +0000 UTC m=+5260.962962864" watchObservedRunningTime="2025-12-05 08:13:53.248032813 +0000 UTC m=+5260.974029863" Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.266666 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.014272023 podStartE2EDuration="4.266639114s" podCreationTimestamp="2025-12-05 08:13:49 +0000 UTC" firstStartedPulling="2025-12-05 08:13:50.505031826 +0000 UTC m=+5258.231028866" lastFinishedPulling="2025-12-05 08:13:52.757398907 +0000 UTC m=+5260.483395957" observedRunningTime="2025-12-05 08:13:53.254405727 +0000 UTC m=+5260.980402787" watchObservedRunningTime="2025-12-05 08:13:53.266639114 +0000 UTC m=+5260.992636164" Dec 05 08:13:53 crc kubenswrapper[4863]: I1205 08:13:53.285658 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" podStartSLOduration=4.285639766 podStartE2EDuration="4.285639766s" podCreationTimestamp="2025-12-05 08:13:49 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:53.28091381 +0000 UTC m=+5261.006910870" watchObservedRunningTime="2025-12-05 08:13:53.285639766 +0000 UTC m=+5261.011636806" Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.242022 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e6d0ac29-808b-4e66-b27e-2c04143e5191","Type":"ContainerStarted","Data":"40f7ef42592fb6f3254546774ecde39ff948c49ea780f9296957974d59a03c1b"} Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.244352 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d5e94393-5304-4608-860a-ca1a1f0f20eb","Type":"ContainerStarted","Data":"a99c71c5dba8bfbb4f8cc69af9b6f43bc541aa6635fc8f271946abc6c15188d9"} Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.247029 4863 generic.go:334] "Generic (PLEG): container finished" podID="9f3a7e74-9e74-480a-9232-a7b789c218d6" containerID="dc3696c23e735e4d10e0f7dfc5ec4c84c16b7dad66024a5a1d121806458d9b56" exitCode=0 Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.247083 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" event={"ID":"9f3a7e74-9e74-480a-9232-a7b789c218d6","Type":"ContainerDied","Data":"dc3696c23e735e4d10e0f7dfc5ec4c84c16b7dad66024a5a1d121806458d9b56"} Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.267523 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.911146495 podStartE2EDuration="5.2675072s" podCreationTimestamp="2025-12-05 08:13:49 +0000 UTC" firstStartedPulling="2025-12-05 08:13:50.400987461 +0000 UTC m=+5258.126984501" lastFinishedPulling="2025-12-05 08:13:52.757348166 +0000 UTC m=+5260.483345206" observedRunningTime="2025-12-05 08:13:54.263444861 +0000 UTC m=+5261.989441901" watchObservedRunningTime="2025-12-05 08:13:54.2675072 +0000 UTC m=+5261.993504240" Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.312311 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.824187334 podStartE2EDuration="5.312292706s" podCreationTimestamp="2025-12-05 08:13:49 +0000 UTC" firstStartedPulling="2025-12-05 08:13:50.267990954 +0000 UTC m=+5257.993987994" lastFinishedPulling="2025-12-05 08:13:52.756096326 +0000 UTC m=+5260.482093366" observedRunningTime="2025-12-05 08:13:54.303169364 +0000 UTC m=+5262.029166404" watchObservedRunningTime="2025-12-05 08:13:54.312292706 +0000 UTC m=+5262.038289746" Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.774339 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.774437 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.774453 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:13:54 crc kubenswrapper[4863]: I1205 08:13:54.891423 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.600123 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.712389 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-scripts\") pod \"9f3a7e74-9e74-480a-9232-a7b789c218d6\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.712760 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5hxd\" (UniqueName: \"kubernetes.io/projected/9f3a7e74-9e74-480a-9232-a7b789c218d6-kube-api-access-c5hxd\") pod \"9f3a7e74-9e74-480a-9232-a7b789c218d6\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.712898 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-config-data\") pod \"9f3a7e74-9e74-480a-9232-a7b789c218d6\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.712955 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-combined-ca-bundle\") pod \"9f3a7e74-9e74-480a-9232-a7b789c218d6\" (UID: \"9f3a7e74-9e74-480a-9232-a7b789c218d6\") " Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.721441 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f3a7e74-9e74-480a-9232-a7b789c218d6-kube-api-access-c5hxd" (OuterVolumeSpecName: "kube-api-access-c5hxd") pod "9f3a7e74-9e74-480a-9232-a7b789c218d6" (UID: "9f3a7e74-9e74-480a-9232-a7b789c218d6"). InnerVolumeSpecName "kube-api-access-c5hxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.724807 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-scripts" (OuterVolumeSpecName: "scripts") pod "9f3a7e74-9e74-480a-9232-a7b789c218d6" (UID: "9f3a7e74-9e74-480a-9232-a7b789c218d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.748819 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-config-data" (OuterVolumeSpecName: "config-data") pod "9f3a7e74-9e74-480a-9232-a7b789c218d6" (UID: "9f3a7e74-9e74-480a-9232-a7b789c218d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.753202 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f3a7e74-9e74-480a-9232-a7b789c218d6" (UID: "9f3a7e74-9e74-480a-9232-a7b789c218d6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.783001 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kxp67"] Dec 05 08:13:55 crc kubenswrapper[4863]: E1205 08:13:55.783384 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f3a7e74-9e74-480a-9232-a7b789c218d6" containerName="nova-cell1-conductor-db-sync" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.783397 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f3a7e74-9e74-480a-9232-a7b789c218d6" containerName="nova-cell1-conductor-db-sync" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.783619 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f3a7e74-9e74-480a-9232-a7b789c218d6" containerName="nova-cell1-conductor-db-sync" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.784846 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.800901 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kxp67"] Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.814948 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.814978 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5hxd\" (UniqueName: \"kubernetes.io/projected/9f3a7e74-9e74-480a-9232-a7b789c218d6-kube-api-access-c5hxd\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.814989 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.814997 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a7e74-9e74-480a-9232-a7b789c218d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.916197 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx49w\" (UniqueName: \"kubernetes.io/projected/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-kube-api-access-jx49w\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.916347 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-catalog-content\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:55 crc kubenswrapper[4863]: I1205 08:13:55.916387 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-utilities\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 
08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.017732 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx49w\" (UniqueName: \"kubernetes.io/projected/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-kube-api-access-jx49w\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.017866 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-catalog-content\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.017901 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-utilities\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.018331 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-utilities\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.018493 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-catalog-content\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.039305 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx49w\" (UniqueName: \"kubernetes.io/projected/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-kube-api-access-jx49w\") pod \"community-operators-kxp67\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.150264 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.280513 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.281447 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vbsxf" event={"ID":"9f3a7e74-9e74-480a-9232-a7b789c218d6","Type":"ContainerDied","Data":"cb8332a40d302fffd0c33f8d57a2beb9e3455b2f4c3bd94b5120eb6cd6c4a4b7"} Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.281502 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb8332a40d302fffd0c33f8d57a2beb9e3455b2f4c3bd94b5120eb6cd6c4a4b7" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.293252 4863 generic.go:334] "Generic (PLEG): container finished" podID="e60035b0-2cb5-4329-973f-8ff053e9a3b3" containerID="d0f2744964eb39cbced247c4e45d638c851588a1f702a8031e46f85956d220a5" exitCode=0 Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.293740 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-trgdq" event={"ID":"e60035b0-2cb5-4329-973f-8ff053e9a3b3","Type":"ContainerDied","Data":"d0f2744964eb39cbced247c4e45d638c851588a1f702a8031e46f85956d220a5"} Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.391352 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.392908 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.397407 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.403631 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.542126 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.542173 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.542205 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v82bc\" (UniqueName: \"kubernetes.io/projected/a542d65a-3183-4576-9cac-d49b7610ecf3-kube-api-access-v82bc\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.644101 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.644152 4863 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.644187 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v82bc\" (UniqueName: \"kubernetes.io/projected/a542d65a-3183-4576-9cac-d49b7610ecf3-kube-api-access-v82bc\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.649517 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.650425 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.670154 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v82bc\" (UniqueName: \"kubernetes.io/projected/a542d65a-3183-4576-9cac-d49b7610ecf3-kube-api-access-v82bc\") pod \"nova-cell1-conductor-0\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.725291 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:56 crc kubenswrapper[4863]: I1205 08:13:56.783131 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kxp67"] Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.233268 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:13:57 crc kubenswrapper[4863]: W1205 08:13:57.234275 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda542d65a_3183_4576_9cac_d49b7610ecf3.slice/crio-902ab79be26add44244ae5fa55e5485dcc44a29cc224f6244c4f706140217fa7 WatchSource:0}: Error finding container 902ab79be26add44244ae5fa55e5485dcc44a29cc224f6244c4f706140217fa7: Status 404 returned error can't find the container with id 902ab79be26add44244ae5fa55e5485dcc44a29cc224f6244c4f706140217fa7 Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.308692 4863 generic.go:334] "Generic (PLEG): container finished" podID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerID="a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b" exitCode=0 Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.308932 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kxp67" event={"ID":"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c","Type":"ContainerDied","Data":"a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b"} Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.308982 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kxp67" event={"ID":"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c","Type":"ContainerStarted","Data":"4fa825efc425bbced68640b8867f22e7c6360c08445685ba985aef4568d9dd97"} Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.310554 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a542d65a-3183-4576-9cac-d49b7610ecf3","Type":"ContainerStarted","Data":"902ab79be26add44244ae5fa55e5485dcc44a29cc224f6244c4f706140217fa7"} Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.684957 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.768193 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-combined-ca-bundle\") pod \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.768583 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-config-data\") pod \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.769132 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrfh7\" (UniqueName: \"kubernetes.io/projected/e60035b0-2cb5-4329-973f-8ff053e9a3b3-kube-api-access-zrfh7\") pod \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.769211 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-scripts\") pod \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\" (UID: \"e60035b0-2cb5-4329-973f-8ff053e9a3b3\") " Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.773937 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e60035b0-2cb5-4329-973f-8ff053e9a3b3-kube-api-access-zrfh7" (OuterVolumeSpecName: "kube-api-access-zrfh7") pod "e60035b0-2cb5-4329-973f-8ff053e9a3b3" (UID: "e60035b0-2cb5-4329-973f-8ff053e9a3b3"). InnerVolumeSpecName "kube-api-access-zrfh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.777870 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-scripts" (OuterVolumeSpecName: "scripts") pod "e60035b0-2cb5-4329-973f-8ff053e9a3b3" (UID: "e60035b0-2cb5-4329-973f-8ff053e9a3b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.799767 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-config-data" (OuterVolumeSpecName: "config-data") pod "e60035b0-2cb5-4329-973f-8ff053e9a3b3" (UID: "e60035b0-2cb5-4329-973f-8ff053e9a3b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.805005 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e60035b0-2cb5-4329-973f-8ff053e9a3b3" (UID: "e60035b0-2cb5-4329-973f-8ff053e9a3b3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.871139 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrfh7\" (UniqueName: \"kubernetes.io/projected/e60035b0-2cb5-4329-973f-8ff053e9a3b3-kube-api-access-zrfh7\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.871171 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.871182 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:57 crc kubenswrapper[4863]: I1205 08:13:57.871190 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e60035b0-2cb5-4329-973f-8ff053e9a3b3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.323503 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-trgdq" Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.323538 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-trgdq" event={"ID":"e60035b0-2cb5-4329-973f-8ff053e9a3b3","Type":"ContainerDied","Data":"a50c01e8cc60f5c1765c7ddce035b56cb0b069d7ec87a096aabb1f57b2b68265"} Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.324068 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a50c01e8cc60f5c1765c7ddce035b56cb0b069d7ec87a096aabb1f57b2b68265" Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.325724 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a542d65a-3183-4576-9cac-d49b7610ecf3","Type":"ContainerStarted","Data":"b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d"} Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.325937 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.329365 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kxp67" event={"ID":"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c","Type":"ContainerStarted","Data":"7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8"} Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.354637 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.35461873 podStartE2EDuration="2.35461873s" podCreationTimestamp="2025-12-05 08:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:13:58.349867915 +0000 UTC m=+5266.075864955" watchObservedRunningTime="2025-12-05 08:13:58.35461873 +0000 UTC m=+5266.080615770" Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.497895 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.498123 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-log" containerID="cri-o://f9d14acf333acadf951937977852bfc679f69236975dabcf5387fbbf982855e5" gracePeriod=30 Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.498206 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-api" containerID="cri-o://a99c71c5dba8bfbb4f8cc69af9b6f43bc541aa6635fc8f271946abc6c15188d9" gracePeriod=30 Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.512031 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.512269 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a1c654e7-bbeb-4c16-aa26-243de5dfe419" containerName="nova-scheduler-scheduler" containerID="cri-o://f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1" gracePeriod=30 Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.555159 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.555391 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-log" containerID="cri-o://893bc0ac01202a5a6adee5cc3642e5edee81bda4d50794f5b852f56aca03e66c" gracePeriod=30 Dec 05 08:13:58 crc kubenswrapper[4863]: I1205 08:13:58.555556 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-metadata" containerID="cri-o://40f7ef42592fb6f3254546774ecde39ff948c49ea780f9296957974d59a03c1b" gracePeriod=30 Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.363667 4863 generic.go:334] "Generic (PLEG): container finished" podID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerID="7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8" exitCode=0 Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.364198 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kxp67" event={"ID":"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c","Type":"ContainerDied","Data":"7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8"} Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.374000 4863 generic.go:334] "Generic (PLEG): container finished" podID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerID="40f7ef42592fb6f3254546774ecde39ff948c49ea780f9296957974d59a03c1b" exitCode=0 Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.374028 4863 generic.go:334] "Generic (PLEG): container finished" podID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerID="893bc0ac01202a5a6adee5cc3642e5edee81bda4d50794f5b852f56aca03e66c" exitCode=143 Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.374096 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e6d0ac29-808b-4e66-b27e-2c04143e5191","Type":"ContainerDied","Data":"40f7ef42592fb6f3254546774ecde39ff948c49ea780f9296957974d59a03c1b"} Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.374128 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"e6d0ac29-808b-4e66-b27e-2c04143e5191","Type":"ContainerDied","Data":"893bc0ac01202a5a6adee5cc3642e5edee81bda4d50794f5b852f56aca03e66c"} Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.378690 4863 generic.go:334] "Generic (PLEG): container finished" podID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerID="a99c71c5dba8bfbb4f8cc69af9b6f43bc541aa6635fc8f271946abc6c15188d9" exitCode=0 Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.378715 4863 generic.go:334] "Generic (PLEG): container finished" podID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerID="f9d14acf333acadf951937977852bfc679f69236975dabcf5387fbbf982855e5" exitCode=143 Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.378970 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d5e94393-5304-4608-860a-ca1a1f0f20eb","Type":"ContainerDied","Data":"a99c71c5dba8bfbb4f8cc69af9b6f43bc541aa6635fc8f271946abc6c15188d9"} Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.379023 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d5e94393-5304-4608-860a-ca1a1f0f20eb","Type":"ContainerDied","Data":"f9d14acf333acadf951937977852bfc679f69236975dabcf5387fbbf982855e5"} Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.687356 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.699386 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.809864 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6d0ac29-808b-4e66-b27e-2c04143e5191-logs\") pod \"e6d0ac29-808b-4e66-b27e-2c04143e5191\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.809922 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-combined-ca-bundle\") pod \"e6d0ac29-808b-4e66-b27e-2c04143e5191\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810037 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-combined-ca-bundle\") pod \"d5e94393-5304-4608-860a-ca1a1f0f20eb\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810103 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-config-data\") pod \"d5e94393-5304-4608-860a-ca1a1f0f20eb\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810133 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8qzk\" (UniqueName: \"kubernetes.io/projected/e6d0ac29-808b-4e66-b27e-2c04143e5191-kube-api-access-p8qzk\") pod \"e6d0ac29-808b-4e66-b27e-2c04143e5191\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810161 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/d5e94393-5304-4608-860a-ca1a1f0f20eb-logs\") pod \"d5e94393-5304-4608-860a-ca1a1f0f20eb\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810187 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-config-data\") pod \"e6d0ac29-808b-4e66-b27e-2c04143e5191\" (UID: \"e6d0ac29-808b-4e66-b27e-2c04143e5191\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810227 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5n5pc\" (UniqueName: \"kubernetes.io/projected/d5e94393-5304-4608-860a-ca1a1f0f20eb-kube-api-access-5n5pc\") pod \"d5e94393-5304-4608-860a-ca1a1f0f20eb\" (UID: \"d5e94393-5304-4608-860a-ca1a1f0f20eb\") " Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810341 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6d0ac29-808b-4e66-b27e-2c04143e5191-logs" (OuterVolumeSpecName: "logs") pod "e6d0ac29-808b-4e66-b27e-2c04143e5191" (UID: "e6d0ac29-808b-4e66-b27e-2c04143e5191"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810456 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5e94393-5304-4608-860a-ca1a1f0f20eb-logs" (OuterVolumeSpecName: "logs") pod "d5e94393-5304-4608-860a-ca1a1f0f20eb" (UID: "d5e94393-5304-4608-860a-ca1a1f0f20eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810725 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6d0ac29-808b-4e66-b27e-2c04143e5191-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.810750 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e94393-5304-4608-860a-ca1a1f0f20eb-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.815170 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5e94393-5304-4608-860a-ca1a1f0f20eb-kube-api-access-5n5pc" (OuterVolumeSpecName: "kube-api-access-5n5pc") pod "d5e94393-5304-4608-860a-ca1a1f0f20eb" (UID: "d5e94393-5304-4608-860a-ca1a1f0f20eb"). InnerVolumeSpecName "kube-api-access-5n5pc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.816136 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6d0ac29-808b-4e66-b27e-2c04143e5191-kube-api-access-p8qzk" (OuterVolumeSpecName: "kube-api-access-p8qzk") pod "e6d0ac29-808b-4e66-b27e-2c04143e5191" (UID: "e6d0ac29-808b-4e66-b27e-2c04143e5191"). InnerVolumeSpecName "kube-api-access-p8qzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.838969 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d5e94393-5304-4608-860a-ca1a1f0f20eb" (UID: "d5e94393-5304-4608-860a-ca1a1f0f20eb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.846538 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-config-data" (OuterVolumeSpecName: "config-data") pod "d5e94393-5304-4608-860a-ca1a1f0f20eb" (UID: "d5e94393-5304-4608-860a-ca1a1f0f20eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.849709 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-config-data" (OuterVolumeSpecName: "config-data") pod "e6d0ac29-808b-4e66-b27e-2c04143e5191" (UID: "e6d0ac29-808b-4e66-b27e-2c04143e5191"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.869656 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6d0ac29-808b-4e66-b27e-2c04143e5191" (UID: "e6d0ac29-808b-4e66-b27e-2c04143e5191"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.890893 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.907134 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.912862 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.912920 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8qzk\" (UniqueName: \"kubernetes.io/projected/e6d0ac29-808b-4e66-b27e-2c04143e5191-kube-api-access-p8qzk\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.912939 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.912955 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5n5pc\" (UniqueName: \"kubernetes.io/projected/d5e94393-5304-4608-860a-ca1a1f0f20eb-kube-api-access-5n5pc\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.912996 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d0ac29-808b-4e66-b27e-2c04143e5191-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.913012 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e94393-5304-4608-860a-ca1a1f0f20eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:13:59 crc kubenswrapper[4863]: I1205 08:13:59.941696 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:14:00 crc 
kubenswrapper[4863]: I1205 08:14:00.017720 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58b44f6965-dkkhm"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.017965 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" podUID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" containerName="dnsmasq-dns" containerID="cri-o://5190debf9deea84fef2b5106b984f1fcf8118a9346be06867692ea26fb4f2e11" gracePeriod=10 Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.391286 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kxp67" event={"ID":"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c","Type":"ContainerStarted","Data":"a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900"} Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.394500 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e6d0ac29-808b-4e66-b27e-2c04143e5191","Type":"ContainerDied","Data":"889a6b5825f71da3a91f95e4cbf5a820d18dd2c845966f5c800f8d5c32470bb4"} Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.394518 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.394555 4863 scope.go:117] "RemoveContainer" containerID="40f7ef42592fb6f3254546774ecde39ff948c49ea780f9296957974d59a03c1b" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.397084 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d5e94393-5304-4608-860a-ca1a1f0f20eb","Type":"ContainerDied","Data":"57df2569352e99a9328c9a3e13e523a2a7c9c777fc0e787dc6d5bb8bb996b8ee"} Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.397092 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.403747 4863 generic.go:334] "Generic (PLEG): container finished" podID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" containerID="5190debf9deea84fef2b5106b984f1fcf8118a9346be06867692ea26fb4f2e11" exitCode=0 Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.403844 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" event={"ID":"ac70b169-15e5-4d35-aad0-9bdc71a5a505","Type":"ContainerDied","Data":"5190debf9deea84fef2b5106b984f1fcf8118a9346be06867692ea26fb4f2e11"} Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.415702 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.423240 4863 scope.go:117] "RemoveContainer" containerID="893bc0ac01202a5a6adee5cc3642e5edee81bda4d50794f5b852f56aca03e66c" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.429276 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kxp67" podStartSLOduration=2.85264733 podStartE2EDuration="5.42925864s" podCreationTimestamp="2025-12-05 08:13:55 +0000 UTC" firstStartedPulling="2025-12-05 08:13:57.312305719 +0000 UTC m=+5265.038302769" lastFinishedPulling="2025-12-05 08:13:59.888917019 +0000 UTC m=+5267.614914079" observedRunningTime="2025-12-05 08:14:00.423889029 +0000 UTC m=+5268.149886069" watchObservedRunningTime="2025-12-05 08:14:00.42925864 +0000 UTC m=+5268.155255680" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.480637 4863 scope.go:117] "RemoveContainer" containerID="a99c71c5dba8bfbb4f8cc69af9b6f43bc541aa6635fc8f271946abc6c15188d9" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.484384 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.492013 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.502098 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.510531 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.520704 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: E1205 08:14:00.525933 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e60035b0-2cb5-4329-973f-8ff053e9a3b3" containerName="nova-manage" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.525974 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e60035b0-2cb5-4329-973f-8ff053e9a3b3" containerName="nova-manage" Dec 05 08:14:00 crc kubenswrapper[4863]: E1205 08:14:00.525999 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-metadata" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.526007 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-metadata" Dec 05 08:14:00 crc kubenswrapper[4863]: E1205 08:14:00.526034 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-api" Dec 05 08:14:00 crc 
kubenswrapper[4863]: I1205 08:14:00.526045 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-api" Dec 05 08:14:00 crc kubenswrapper[4863]: E1205 08:14:00.526094 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-log" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.526101 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-log" Dec 05 08:14:00 crc kubenswrapper[4863]: E1205 08:14:00.526120 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-log" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.526128 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-log" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.528688 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-metadata" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.528710 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-log" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.528744 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" containerName="nova-metadata-log" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.528764 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" containerName="nova-api-api" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.528771 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e60035b0-2cb5-4329-973f-8ff053e9a3b3" containerName="nova-manage" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.552689 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.562530 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.565869 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.571340 4863 scope.go:117] "RemoveContainer" containerID="f9d14acf333acadf951937977852bfc679f69236975dabcf5387fbbf982855e5" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.591550 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.592947 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.597773 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.600604 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.617846 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5e94393-5304-4608-860a-ca1a1f0f20eb" path="/var/lib/kubelet/pods/d5e94393-5304-4608-860a-ca1a1f0f20eb/volumes" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.618771 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6d0ac29-808b-4e66-b27e-2c04143e5191" path="/var/lib/kubelet/pods/e6d0ac29-808b-4e66-b27e-2c04143e5191/volumes" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642150 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-294c7\" (UniqueName: \"kubernetes.io/projected/d63a4154-26ff-4261-aa77-2ab1455f18ba-kube-api-access-294c7\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642232 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642257 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d63a4154-26ff-4261-aa77-2ab1455f18ba-logs\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642433 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-config-data\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642591 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-config-data\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642619 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47577fd8-d815-45c1-b3b5-c2f5fa260506-logs\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642650 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/47577fd8-d815-45c1-b3b5-c2f5fa260506-kube-api-access-2msd5\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.642717 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744571 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744611 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d63a4154-26ff-4261-aa77-2ab1455f18ba-logs\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744680 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-config-data\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744726 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-config-data\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744748 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47577fd8-d815-45c1-b3b5-c2f5fa260506-logs\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744765 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/47577fd8-d815-45c1-b3b5-c2f5fa260506-kube-api-access-2msd5\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744797 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.744817 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-294c7\" (UniqueName: \"kubernetes.io/projected/d63a4154-26ff-4261-aa77-2ab1455f18ba-kube-api-access-294c7\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.746148 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47577fd8-d815-45c1-b3b5-c2f5fa260506-logs\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.747215 
4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d63a4154-26ff-4261-aa77-2ab1455f18ba-logs\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.766112 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.769133 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-config-data\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.769159 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.770245 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-294c7\" (UniqueName: \"kubernetes.io/projected/d63a4154-26ff-4261-aa77-2ab1455f18ba-kube-api-access-294c7\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.770421 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-config-data\") pod \"nova-metadata-0\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.773006 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/47577fd8-d815-45c1-b3b5-c2f5fa260506-kube-api-access-2msd5\") pod \"nova-api-0\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.879923 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.948371 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:14:00 crc kubenswrapper[4863]: I1205 08:14:00.970014 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.049376 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-794s4\" (UniqueName: \"kubernetes.io/projected/a1c654e7-bbeb-4c16-aa26-243de5dfe419-kube-api-access-794s4\") pod \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.049421 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-config-data\") pod \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.049536 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-combined-ca-bundle\") pod \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\" (UID: \"a1c654e7-bbeb-4c16-aa26-243de5dfe419\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.058034 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1c654e7-bbeb-4c16-aa26-243de5dfe419-kube-api-access-794s4" (OuterVolumeSpecName: "kube-api-access-794s4") pod "a1c654e7-bbeb-4c16-aa26-243de5dfe419" (UID: "a1c654e7-bbeb-4c16-aa26-243de5dfe419"). InnerVolumeSpecName "kube-api-access-794s4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.058528 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.073657 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1c654e7-bbeb-4c16-aa26-243de5dfe419" (UID: "a1c654e7-bbeb-4c16-aa26-243de5dfe419"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.087053 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-config-data" (OuterVolumeSpecName: "config-data") pod "a1c654e7-bbeb-4c16-aa26-243de5dfe419" (UID: "a1c654e7-bbeb-4c16-aa26-243de5dfe419"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152141 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-nb\") pod \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152183 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-dns-svc\") pod \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152234 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-sb\") pod \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152334 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-config\") pod \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152410 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9pqq\" (UniqueName: \"kubernetes.io/projected/ac70b169-15e5-4d35-aad0-9bdc71a5a505-kube-api-access-n9pqq\") pod \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\" (UID: \"ac70b169-15e5-4d35-aad0-9bdc71a5a505\") " Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152738 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152749 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-794s4\" (UniqueName: \"kubernetes.io/projected/a1c654e7-bbeb-4c16-aa26-243de5dfe419-kube-api-access-794s4\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.152758 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c654e7-bbeb-4c16-aa26-243de5dfe419-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.161693 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac70b169-15e5-4d35-aad0-9bdc71a5a505-kube-api-access-n9pqq" (OuterVolumeSpecName: "kube-api-access-n9pqq") pod "ac70b169-15e5-4d35-aad0-9bdc71a5a505" (UID: "ac70b169-15e5-4d35-aad0-9bdc71a5a505"). InnerVolumeSpecName "kube-api-access-n9pqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.201340 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-config" (OuterVolumeSpecName: "config") pod "ac70b169-15e5-4d35-aad0-9bdc71a5a505" (UID: "ac70b169-15e5-4d35-aad0-9bdc71a5a505"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.202656 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ac70b169-15e5-4d35-aad0-9bdc71a5a505" (UID: "ac70b169-15e5-4d35-aad0-9bdc71a5a505"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.205772 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ac70b169-15e5-4d35-aad0-9bdc71a5a505" (UID: "ac70b169-15e5-4d35-aad0-9bdc71a5a505"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.223817 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ac70b169-15e5-4d35-aad0-9bdc71a5a505" (UID: "ac70b169-15e5-4d35-aad0-9bdc71a5a505"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.253881 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.253922 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9pqq\" (UniqueName: \"kubernetes.io/projected/ac70b169-15e5-4d35-aad0-9bdc71a5a505-kube-api-access-n9pqq\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.253935 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.253945 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.253954 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac70b169-15e5-4d35-aad0-9bdc71a5a505-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.414858 4863 generic.go:334] "Generic (PLEG): container finished" podID="a1c654e7-bbeb-4c16-aa26-243de5dfe419" containerID="f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1" exitCode=0 Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.414896 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.414917 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a1c654e7-bbeb-4c16-aa26-243de5dfe419","Type":"ContainerDied","Data":"f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1"} Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.415264 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a1c654e7-bbeb-4c16-aa26-243de5dfe419","Type":"ContainerDied","Data":"f5dbd0c05351fc7c95f7e11dee1ba36c932d6fb509f9c9ae88a44d35fe880e80"} Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.415280 4863 scope.go:117] "RemoveContainer" containerID="f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.420567 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" event={"ID":"ac70b169-15e5-4d35-aad0-9bdc71a5a505","Type":"ContainerDied","Data":"eecff0852da0258a3c727dc105740676d42b5992c2b0461f0d0c79a3f7e263fb"} Dec 05 08:14:01 crc kubenswrapper[4863]: W1205 08:14:01.420634 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47577fd8_d815_45c1_b3b5_c2f5fa260506.slice/crio-b2b0630ba9de6ab1d62575650b902ff2aec755aa3276f66b859ea4925dee9d5d WatchSource:0}: Error finding container b2b0630ba9de6ab1d62575650b902ff2aec755aa3276f66b859ea4925dee9d5d: Status 404 returned error can't find the container with id b2b0630ba9de6ab1d62575650b902ff2aec755aa3276f66b859ea4925dee9d5d Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.420688 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58b44f6965-dkkhm" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.422743 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.446183 4863 scope.go:117] "RemoveContainer" containerID="f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1" Dec 05 08:14:01 crc kubenswrapper[4863]: E1205 08:14:01.446768 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1\": container with ID starting with f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1 not found: ID does not exist" containerID="f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.446854 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1"} err="failed to get container status \"f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1\": rpc error: code = NotFound desc = could not find container \"f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1\": container with ID starting with f512a2ed0930c11fc4156770009f76a65c7344f4beb5cf3d910862e5b63face1 not found: ID does not exist" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.446884 4863 scope.go:117] "RemoveContainer" containerID="5190debf9deea84fef2b5106b984f1fcf8118a9346be06867692ea26fb4f2e11" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.459804 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.480444 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.490239 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.495657 4863 scope.go:117] "RemoveContainer" containerID="0a1a4acbc52d9c9810bab70b409191e7e15e099cfef4b17e034fc46fc2c1f73d" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.501623 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58b44f6965-dkkhm"] Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.513557 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:01 crc kubenswrapper[4863]: E1205 08:14:01.514033 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" containerName="init" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.514056 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" containerName="init" Dec 05 08:14:01 crc kubenswrapper[4863]: E1205 08:14:01.514076 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c654e7-bbeb-4c16-aa26-243de5dfe419" containerName="nova-scheduler-scheduler" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.514085 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c654e7-bbeb-4c16-aa26-243de5dfe419" containerName="nova-scheduler-scheduler" Dec 05 08:14:01 crc kubenswrapper[4863]: E1205 08:14:01.514127 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" 
containerName="dnsmasq-dns" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.514137 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" containerName="dnsmasq-dns" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.514376 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c654e7-bbeb-4c16-aa26-243de5dfe419" containerName="nova-scheduler-scheduler" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.514411 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" containerName="dnsmasq-dns" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.515170 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.516772 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.523068 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58b44f6965-dkkhm"] Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.532523 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.560622 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvwpj\" (UniqueName: \"kubernetes.io/projected/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-kube-api-access-vvwpj\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.560765 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.560806 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-config-data\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.662674 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.662747 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-config-data\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.662824 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvwpj\" (UniqueName: \"kubernetes.io/projected/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-kube-api-access-vvwpj\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" 
Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.667581 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.667992 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-config-data\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.699002 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvwpj\" (UniqueName: \"kubernetes.io/projected/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-kube-api-access-vvwpj\") pod \"nova-scheduler-0\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:01 crc kubenswrapper[4863]: I1205 08:14:01.761104 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.252868 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.437620 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47577fd8-d815-45c1-b3b5-c2f5fa260506","Type":"ContainerStarted","Data":"91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491"} Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.437685 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47577fd8-d815-45c1-b3b5-c2f5fa260506","Type":"ContainerStarted","Data":"f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791"} Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.437698 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47577fd8-d815-45c1-b3b5-c2f5fa260506","Type":"ContainerStarted","Data":"b2b0630ba9de6ab1d62575650b902ff2aec755aa3276f66b859ea4925dee9d5d"} Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.440451 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e","Type":"ContainerStarted","Data":"924195d25c629cf2f4d709e699423e9a01e218052ce88dc29435f419dec62ebc"} Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.443287 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d63a4154-26ff-4261-aa77-2ab1455f18ba","Type":"ContainerStarted","Data":"cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64"} Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.443408 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d63a4154-26ff-4261-aa77-2ab1455f18ba","Type":"ContainerStarted","Data":"77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14"} Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.443427 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d63a4154-26ff-4261-aa77-2ab1455f18ba","Type":"ContainerStarted","Data":"6cd6b9ae57058e295884e2d7dcef11293e67a8710a9a9c628956f344c30d63d2"} Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 
08:14:02.466870 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.466849929 podStartE2EDuration="2.466849929s" podCreationTimestamp="2025-12-05 08:14:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:14:02.459613244 +0000 UTC m=+5270.185610284" watchObservedRunningTime="2025-12-05 08:14:02.466849929 +0000 UTC m=+5270.192846969" Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.486380 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.486364163 podStartE2EDuration="2.486364163s" podCreationTimestamp="2025-12-05 08:14:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:14:02.482887678 +0000 UTC m=+5270.208884718" watchObservedRunningTime="2025-12-05 08:14:02.486364163 +0000 UTC m=+5270.212361203" Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.625131 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1c654e7-bbeb-4c16-aa26-243de5dfe419" path="/var/lib/kubelet/pods/a1c654e7-bbeb-4c16-aa26-243de5dfe419/volumes" Dec 05 08:14:02 crc kubenswrapper[4863]: I1205 08:14:02.626011 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac70b169-15e5-4d35-aad0-9bdc71a5a505" path="/var/lib/kubelet/pods/ac70b169-15e5-4d35-aad0-9bdc71a5a505/volumes" Dec 05 08:14:03 crc kubenswrapper[4863]: I1205 08:14:03.466790 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e","Type":"ContainerStarted","Data":"757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e"} Dec 05 08:14:03 crc kubenswrapper[4863]: I1205 08:14:03.493737 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.493714195 podStartE2EDuration="2.493714195s" podCreationTimestamp="2025-12-05 08:14:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:14:03.490994909 +0000 UTC m=+5271.216991959" watchObservedRunningTime="2025-12-05 08:14:03.493714195 +0000 UTC m=+5271.219711255" Dec 05 08:14:05 crc kubenswrapper[4863]: I1205 08:14:05.949979 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:14:05 crc kubenswrapper[4863]: I1205 08:14:05.950311 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:14:06 crc kubenswrapper[4863]: I1205 08:14:06.151243 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:14:06 crc kubenswrapper[4863]: I1205 08:14:06.151299 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:14:06 crc kubenswrapper[4863]: I1205 08:14:06.222990 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:14:06 crc kubenswrapper[4863]: I1205 08:14:06.576981 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:14:06 crc kubenswrapper[4863]: 
I1205 08:14:06.601860 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:14:06 crc kubenswrapper[4863]: E1205 08:14:06.602978 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:14:06 crc kubenswrapper[4863]: I1205 08:14:06.628525 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kxp67"] Dec 05 08:14:06 crc kubenswrapper[4863]: I1205 08:14:06.761794 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 08:14:06 crc kubenswrapper[4863]: I1205 08:14:06.769285 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.273409 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-xhsv5"] Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.275998 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.281266 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.282214 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.293996 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xhsv5"] Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.373664 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-config-data\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.373941 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kw45\" (UniqueName: \"kubernetes.io/projected/59cd1339-d5a8-41ac-aed3-9148fd26816a-kube-api-access-4kw45\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.374242 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-scripts\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.374454 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: 
\"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.476884 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kw45\" (UniqueName: \"kubernetes.io/projected/59cd1339-d5a8-41ac-aed3-9148fd26816a-kube-api-access-4kw45\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.477008 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-scripts\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.477061 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.477155 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-config-data\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.482500 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-config-data\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.482926 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-scripts\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.488404 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.495264 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kw45\" (UniqueName: \"kubernetes.io/projected/59cd1339-d5a8-41ac-aed3-9148fd26816a-kube-api-access-4kw45\") pod \"nova-cell1-cell-mapping-xhsv5\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:07 crc kubenswrapper[4863]: I1205 08:14:07.602452 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:08 crc kubenswrapper[4863]: I1205 08:14:08.079885 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xhsv5"] Dec 05 08:14:08 crc kubenswrapper[4863]: W1205 08:14:08.086413 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59cd1339_d5a8_41ac_aed3_9148fd26816a.slice/crio-2dc9196dae34974bad46874cb7991f5f87bbdf7f088129cbdc0cfd24abcea8c9 WatchSource:0}: Error finding container 2dc9196dae34974bad46874cb7991f5f87bbdf7f088129cbdc0cfd24abcea8c9: Status 404 returned error can't find the container with id 2dc9196dae34974bad46874cb7991f5f87bbdf7f088129cbdc0cfd24abcea8c9 Dec 05 08:14:08 crc kubenswrapper[4863]: I1205 08:14:08.525302 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kxp67" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="registry-server" containerID="cri-o://a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900" gracePeriod=2 Dec 05 08:14:08 crc kubenswrapper[4863]: I1205 08:14:08.525752 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xhsv5" event={"ID":"59cd1339-d5a8-41ac-aed3-9148fd26816a","Type":"ContainerStarted","Data":"40e6ecd9c6c336d33768c2a0f3fcba5fc37583a4f23872519d452200756fbf64"} Dec 05 08:14:08 crc kubenswrapper[4863]: I1205 08:14:08.525785 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xhsv5" event={"ID":"59cd1339-d5a8-41ac-aed3-9148fd26816a","Type":"ContainerStarted","Data":"2dc9196dae34974bad46874cb7991f5f87bbdf7f088129cbdc0cfd24abcea8c9"} Dec 05 08:14:08 crc kubenswrapper[4863]: I1205 08:14:08.548695 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-xhsv5" podStartSLOduration=1.548676851 podStartE2EDuration="1.548676851s" podCreationTimestamp="2025-12-05 08:14:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:14:08.539654221 +0000 UTC m=+5276.265651271" watchObservedRunningTime="2025-12-05 08:14:08.548676851 +0000 UTC m=+5276.274673891" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.047984 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.109390 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx49w\" (UniqueName: \"kubernetes.io/projected/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-kube-api-access-jx49w\") pod \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.109683 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-utilities\") pod \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.109714 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-catalog-content\") pod \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\" (UID: \"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c\") " Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.115522 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-utilities" (OuterVolumeSpecName: "utilities") pod "d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" (UID: "d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.122192 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-kube-api-access-jx49w" (OuterVolumeSpecName: "kube-api-access-jx49w") pod "d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" (UID: "d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c"). InnerVolumeSpecName "kube-api-access-jx49w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.184035 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" (UID: "d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.211383 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.211418 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.211431 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx49w\" (UniqueName: \"kubernetes.io/projected/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c-kube-api-access-jx49w\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.537266 4863 generic.go:334] "Generic (PLEG): container finished" podID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerID="a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900" exitCode=0 Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.537344 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kxp67" event={"ID":"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c","Type":"ContainerDied","Data":"a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900"} Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.537364 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kxp67" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.537398 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kxp67" event={"ID":"d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c","Type":"ContainerDied","Data":"4fa825efc425bbced68640b8867f22e7c6360c08445685ba985aef4568d9dd97"} Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.537429 4863 scope.go:117] "RemoveContainer" containerID="a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.560790 4863 scope.go:117] "RemoveContainer" containerID="7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.576082 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kxp67"] Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.591947 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kxp67"] Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.602023 4863 scope.go:117] "RemoveContainer" containerID="a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.634627 4863 scope.go:117] "RemoveContainer" containerID="a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900" Dec 05 08:14:09 crc kubenswrapper[4863]: E1205 08:14:09.635104 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900\": container with ID starting with a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900 not found: ID does not exist" containerID="a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.635168 
4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900"} err="failed to get container status \"a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900\": rpc error: code = NotFound desc = could not find container \"a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900\": container with ID starting with a5cb6626e60a70cdaae79a88540e8154ddb0dd4a3bc45a6c09306564606d4900 not found: ID does not exist" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.635208 4863 scope.go:117] "RemoveContainer" containerID="7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8" Dec 05 08:14:09 crc kubenswrapper[4863]: E1205 08:14:09.635671 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8\": container with ID starting with 7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8 not found: ID does not exist" containerID="7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.635718 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8"} err="failed to get container status \"7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8\": rpc error: code = NotFound desc = could not find container \"7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8\": container with ID starting with 7421896426f2fe3ea8fe6905dc85395ee96b0770019d96a5536b7582e9ce78d8 not found: ID does not exist" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.635751 4863 scope.go:117] "RemoveContainer" containerID="a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b" Dec 05 08:14:09 crc kubenswrapper[4863]: E1205 08:14:09.636138 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b\": container with ID starting with a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b not found: ID does not exist" containerID="a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b" Dec 05 08:14:09 crc kubenswrapper[4863]: I1205 08:14:09.636181 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b"} err="failed to get container status \"a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b\": rpc error: code = NotFound desc = could not find container \"a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b\": container with ID starting with a876ca9f5f0f7acae4a5d94a1fca421032b388e64407dbc90d9e8a8490bf315b not found: ID does not exist" Dec 05 08:14:10 crc kubenswrapper[4863]: I1205 08:14:10.619828 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" path="/var/lib/kubelet/pods/d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c/volumes" Dec 05 08:14:10 crc kubenswrapper[4863]: I1205 08:14:10.882746 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:14:10 crc kubenswrapper[4863]: I1205 08:14:10.883090 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:14:10 crc kubenswrapper[4863]: I1205 08:14:10.950073 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:14:10 crc kubenswrapper[4863]: I1205 08:14:10.950119 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:14:11 crc kubenswrapper[4863]: I1205 08:14:11.761787 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 08:14:11 crc kubenswrapper[4863]: I1205 08:14:11.791637 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 08:14:11 crc kubenswrapper[4863]: I1205 08:14:11.964700 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.60:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:11 crc kubenswrapper[4863]: I1205 08:14:11.964932 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.60:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:12 crc kubenswrapper[4863]: I1205 08:14:12.046707 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.61:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:12 crc kubenswrapper[4863]: I1205 08:14:12.046722 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.61:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:12 crc kubenswrapper[4863]: I1205 08:14:12.666100 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 08:14:13 crc kubenswrapper[4863]: I1205 08:14:13.621112 4863 generic.go:334] "Generic (PLEG): container finished" podID="59cd1339-d5a8-41ac-aed3-9148fd26816a" containerID="40e6ecd9c6c336d33768c2a0f3fcba5fc37583a4f23872519d452200756fbf64" exitCode=0 Dec 05 08:14:13 crc kubenswrapper[4863]: I1205 08:14:13.621235 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xhsv5" event={"ID":"59cd1339-d5a8-41ac-aed3-9148fd26816a","Type":"ContainerDied","Data":"40e6ecd9c6c336d33768c2a0f3fcba5fc37583a4f23872519d452200756fbf64"} Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.024378 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.167519 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-scripts\") pod \"59cd1339-d5a8-41ac-aed3-9148fd26816a\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.168683 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-config-data\") pod \"59cd1339-d5a8-41ac-aed3-9148fd26816a\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.168782 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kw45\" (UniqueName: \"kubernetes.io/projected/59cd1339-d5a8-41ac-aed3-9148fd26816a-kube-api-access-4kw45\") pod \"59cd1339-d5a8-41ac-aed3-9148fd26816a\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.168841 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-combined-ca-bundle\") pod \"59cd1339-d5a8-41ac-aed3-9148fd26816a\" (UID: \"59cd1339-d5a8-41ac-aed3-9148fd26816a\") " Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.174157 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59cd1339-d5a8-41ac-aed3-9148fd26816a-kube-api-access-4kw45" (OuterVolumeSpecName: "kube-api-access-4kw45") pod "59cd1339-d5a8-41ac-aed3-9148fd26816a" (UID: "59cd1339-d5a8-41ac-aed3-9148fd26816a"). InnerVolumeSpecName "kube-api-access-4kw45". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.174665 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-scripts" (OuterVolumeSpecName: "scripts") pod "59cd1339-d5a8-41ac-aed3-9148fd26816a" (UID: "59cd1339-d5a8-41ac-aed3-9148fd26816a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.206164 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59cd1339-d5a8-41ac-aed3-9148fd26816a" (UID: "59cd1339-d5a8-41ac-aed3-9148fd26816a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.220225 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-config-data" (OuterVolumeSpecName: "config-data") pod "59cd1339-d5a8-41ac-aed3-9148fd26816a" (UID: "59cd1339-d5a8-41ac-aed3-9148fd26816a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.271232 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.271282 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kw45\" (UniqueName: \"kubernetes.io/projected/59cd1339-d5a8-41ac-aed3-9148fd26816a-kube-api-access-4kw45\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.271305 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.271321 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59cd1339-d5a8-41ac-aed3-9148fd26816a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.646991 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xhsv5" event={"ID":"59cd1339-d5a8-41ac-aed3-9148fd26816a","Type":"ContainerDied","Data":"2dc9196dae34974bad46874cb7991f5f87bbdf7f088129cbdc0cfd24abcea8c9"} Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.647077 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dc9196dae34974bad46874cb7991f5f87bbdf7f088129cbdc0cfd24abcea8c9" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.647088 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xhsv5" Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.835533 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.835730 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" containerName="nova-scheduler-scheduler" containerID="cri-o://757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" gracePeriod=30 Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.845726 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.845951 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-log" containerID="cri-o://f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791" gracePeriod=30 Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.846057 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-api" containerID="cri-o://91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491" gracePeriod=30 Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.913327 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.913653 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" 
containerName="nova-metadata-metadata" containerID="cri-o://cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64" gracePeriod=30 Dec 05 08:14:15 crc kubenswrapper[4863]: I1205 08:14:15.913809 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-log" containerID="cri-o://77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14" gracePeriod=30 Dec 05 08:14:16 crc kubenswrapper[4863]: I1205 08:14:16.657762 4863 generic.go:334] "Generic (PLEG): container finished" podID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerID="77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14" exitCode=143 Dec 05 08:14:16 crc kubenswrapper[4863]: I1205 08:14:16.657887 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d63a4154-26ff-4261-aa77-2ab1455f18ba","Type":"ContainerDied","Data":"77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14"} Dec 05 08:14:16 crc kubenswrapper[4863]: I1205 08:14:16.660677 4863 generic.go:334] "Generic (PLEG): container finished" podID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerID="f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791" exitCode=143 Dec 05 08:14:16 crc kubenswrapper[4863]: I1205 08:14:16.660733 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47577fd8-d815-45c1-b3b5-c2f5fa260506","Type":"ContainerDied","Data":"f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791"} Dec 05 08:14:16 crc kubenswrapper[4863]: E1205 08:14:16.766626 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 08:14:16 crc kubenswrapper[4863]: E1205 08:14:16.768798 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 08:14:16 crc kubenswrapper[4863]: E1205 08:14:16.770401 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 08:14:16 crc kubenswrapper[4863]: E1205 08:14:16.770490 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" containerName="nova-scheduler-scheduler" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.555542 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.562581 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.569305 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.601799 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:14:19 crc kubenswrapper[4863]: E1205 08:14:19.602049 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681403 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-combined-ca-bundle\") pod \"47577fd8-d815-45c1-b3b5-c2f5fa260506\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681462 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47577fd8-d815-45c1-b3b5-c2f5fa260506-logs\") pod \"47577fd8-d815-45c1-b3b5-c2f5fa260506\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681523 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-294c7\" (UniqueName: \"kubernetes.io/projected/d63a4154-26ff-4261-aa77-2ab1455f18ba-kube-api-access-294c7\") pod \"d63a4154-26ff-4261-aa77-2ab1455f18ba\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681552 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-config-data\") pod \"47577fd8-d815-45c1-b3b5-c2f5fa260506\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681572 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-config-data\") pod \"d63a4154-26ff-4261-aa77-2ab1455f18ba\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681593 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-config-data\") pod \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681659 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-combined-ca-bundle\") pod \"d63a4154-26ff-4261-aa77-2ab1455f18ba\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681701 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/d63a4154-26ff-4261-aa77-2ab1455f18ba-logs\") pod \"d63a4154-26ff-4261-aa77-2ab1455f18ba\" (UID: \"d63a4154-26ff-4261-aa77-2ab1455f18ba\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681730 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvwpj\" (UniqueName: \"kubernetes.io/projected/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-kube-api-access-vvwpj\") pod \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681758 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/47577fd8-d815-45c1-b3b5-c2f5fa260506-kube-api-access-2msd5\") pod \"47577fd8-d815-45c1-b3b5-c2f5fa260506\" (UID: \"47577fd8-d815-45c1-b3b5-c2f5fa260506\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.681799 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-combined-ca-bundle\") pod \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\" (UID: \"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e\") " Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.683101 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47577fd8-d815-45c1-b3b5-c2f5fa260506-logs" (OuterVolumeSpecName: "logs") pod "47577fd8-d815-45c1-b3b5-c2f5fa260506" (UID: "47577fd8-d815-45c1-b3b5-c2f5fa260506"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.684381 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d63a4154-26ff-4261-aa77-2ab1455f18ba-logs" (OuterVolumeSpecName: "logs") pod "d63a4154-26ff-4261-aa77-2ab1455f18ba" (UID: "d63a4154-26ff-4261-aa77-2ab1455f18ba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.690675 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47577fd8-d815-45c1-b3b5-c2f5fa260506-kube-api-access-2msd5" (OuterVolumeSpecName: "kube-api-access-2msd5") pod "47577fd8-d815-45c1-b3b5-c2f5fa260506" (UID: "47577fd8-d815-45c1-b3b5-c2f5fa260506"). InnerVolumeSpecName "kube-api-access-2msd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.690783 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d63a4154-26ff-4261-aa77-2ab1455f18ba-kube-api-access-294c7" (OuterVolumeSpecName: "kube-api-access-294c7") pod "d63a4154-26ff-4261-aa77-2ab1455f18ba" (UID: "d63a4154-26ff-4261-aa77-2ab1455f18ba"). InnerVolumeSpecName "kube-api-access-294c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.693626 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-kube-api-access-vvwpj" (OuterVolumeSpecName: "kube-api-access-vvwpj") pod "3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" (UID: "3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e"). InnerVolumeSpecName "kube-api-access-vvwpj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.699953 4863 generic.go:334] "Generic (PLEG): container finished" podID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerID="91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491" exitCode=0 Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.700021 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47577fd8-d815-45c1-b3b5-c2f5fa260506","Type":"ContainerDied","Data":"91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491"} Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.700051 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47577fd8-d815-45c1-b3b5-c2f5fa260506","Type":"ContainerDied","Data":"b2b0630ba9de6ab1d62575650b902ff2aec755aa3276f66b859ea4925dee9d5d"} Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.700070 4863 scope.go:117] "RemoveContainer" containerID="91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.700211 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.704507 4863 generic.go:334] "Generic (PLEG): container finished" podID="3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" containerID="757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" exitCode=0 Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.704536 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.704589 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e","Type":"ContainerDied","Data":"757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e"} Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.704617 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e","Type":"ContainerDied","Data":"924195d25c629cf2f4d709e699423e9a01e218052ce88dc29435f419dec62ebc"} Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.707171 4863 generic.go:334] "Generic (PLEG): container finished" podID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerID="cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64" exitCode=0 Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.707262 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d63a4154-26ff-4261-aa77-2ab1455f18ba","Type":"ContainerDied","Data":"cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64"} Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.707422 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d63a4154-26ff-4261-aa77-2ab1455f18ba","Type":"ContainerDied","Data":"6cd6b9ae57058e295884e2d7dcef11293e67a8710a9a9c628956f344c30d63d2"} Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.708501 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.724068 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-config-data" (OuterVolumeSpecName: "config-data") pod "47577fd8-d815-45c1-b3b5-c2f5fa260506" (UID: "47577fd8-d815-45c1-b3b5-c2f5fa260506"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.726854 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" (UID: "3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.733091 4863 scope.go:117] "RemoveContainer" containerID="f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.735208 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-config-data" (OuterVolumeSpecName: "config-data") pod "3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" (UID: "3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.738296 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-config-data" (OuterVolumeSpecName: "config-data") pod "d63a4154-26ff-4261-aa77-2ab1455f18ba" (UID: "d63a4154-26ff-4261-aa77-2ab1455f18ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.744898 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47577fd8-d815-45c1-b3b5-c2f5fa260506" (UID: "47577fd8-d815-45c1-b3b5-c2f5fa260506"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.746539 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d63a4154-26ff-4261-aa77-2ab1455f18ba" (UID: "d63a4154-26ff-4261-aa77-2ab1455f18ba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.758334 4863 scope.go:117] "RemoveContainer" containerID="91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491" Dec 05 08:14:19 crc kubenswrapper[4863]: E1205 08:14:19.758872 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491\": container with ID starting with 91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491 not found: ID does not exist" containerID="91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.758950 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491"} err="failed to get container status \"91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491\": rpc error: code = NotFound desc = could not find container \"91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491\": container with ID starting with 91c723e77138302cb697cbb1c66b9225d0fc00768718a854a16c2191b9c62491 not found: ID does not exist" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.759000 4863 scope.go:117] "RemoveContainer" containerID="f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791" Dec 05 08:14:19 crc kubenswrapper[4863]: E1205 08:14:19.759346 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791\": container with ID starting with f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791 not found: ID does not exist" containerID="f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.759392 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791"} err="failed to get container status \"f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791\": rpc error: code = NotFound desc = could not find container \"f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791\": container with ID starting with f11efdaf7a48e3ffe8786f5a535990de0abfb318678da9ee2ffc21d50fbcb791 not found: ID does not exist" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.759411 4863 scope.go:117] "RemoveContainer" containerID="757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.779225 4863 scope.go:117] "RemoveContainer" containerID="757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" Dec 05 08:14:19 crc kubenswrapper[4863]: E1205 08:14:19.780024 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e\": container with ID starting with 757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e not found: ID does not exist" containerID="757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.780062 4863 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e"} err="failed to get container status \"757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e\": rpc error: code = NotFound desc = could not find container \"757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e\": container with ID starting with 757f7fbc6c02d17d38a53330977ca41824f4635f4f4cd76b42e6e1095cae071e not found: ID does not exist" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.780087 4863 scope.go:117] "RemoveContainer" containerID="cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784542 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784574 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47577fd8-d815-45c1-b3b5-c2f5fa260506-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784589 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-294c7\" (UniqueName: \"kubernetes.io/projected/d63a4154-26ff-4261-aa77-2ab1455f18ba-kube-api-access-294c7\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784603 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47577fd8-d815-45c1-b3b5-c2f5fa260506-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784616 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784629 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784640 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d63a4154-26ff-4261-aa77-2ab1455f18ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784652 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d63a4154-26ff-4261-aa77-2ab1455f18ba-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784662 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvwpj\" (UniqueName: \"kubernetes.io/projected/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-kube-api-access-vvwpj\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784673 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/47577fd8-d815-45c1-b3b5-c2f5fa260506-kube-api-access-2msd5\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.784686 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.802435 4863 scope.go:117] "RemoveContainer" containerID="77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.823034 4863 scope.go:117] "RemoveContainer" containerID="cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64" Dec 05 08:14:19 crc kubenswrapper[4863]: E1205 08:14:19.823413 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64\": container with ID starting with cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64 not found: ID does not exist" containerID="cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.823442 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64"} err="failed to get container status \"cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64\": rpc error: code = NotFound desc = could not find container \"cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64\": container with ID starting with cb2ea8e9db4c3e725a25e9e312e5bcf78d8b63e3149a8bf3034c7895a5331b64 not found: ID does not exist" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.823464 4863 scope.go:117] "RemoveContainer" containerID="77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14" Dec 05 08:14:19 crc kubenswrapper[4863]: E1205 08:14:19.823707 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14\": container with ID starting with 77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14 not found: ID does not exist" containerID="77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14" Dec 05 08:14:19 crc kubenswrapper[4863]: I1205 08:14:19.823732 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14"} err="failed to get container status \"77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14\": rpc error: code = NotFound desc = could not find container \"77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14\": container with ID starting with 77c05bb082bf78052684b35450babc7e32155e42335e00cbcb3b4d9e5d2e5b14 not found: ID does not exist" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.055852 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.066792 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.079980 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.090167 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.104547 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105147 4863 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="extract-utilities" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105175 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="extract-utilities" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105200 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59cd1339-d5a8-41ac-aed3-9148fd26816a" containerName="nova-manage" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105213 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="59cd1339-d5a8-41ac-aed3-9148fd26816a" containerName="nova-manage" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105229 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="registry-server" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105238 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="registry-server" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105269 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-api" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105279 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-api" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105290 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="extract-content" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105298 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="extract-content" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105311 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" containerName="nova-scheduler-scheduler" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105320 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" containerName="nova-scheduler-scheduler" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105340 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-log" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105349 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-log" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105370 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-metadata" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105378 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-metadata" Dec 05 08:14:20 crc kubenswrapper[4863]: E1205 08:14:20.105387 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-log" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105395 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-log" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105669 4863 
memory_manager.go:354] "RemoveStaleState removing state" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-api" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105697 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-log" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105721 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" containerName="nova-api-log" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105732 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f2ece0-f9a3-438b-8c1d-6c32eb837a3c" containerName="registry-server" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105746 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" containerName="nova-metadata-metadata" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105760 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="59cd1339-d5a8-41ac-aed3-9148fd26816a" containerName="nova-manage" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.105769 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" containerName="nova-scheduler-scheduler" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.107179 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.108687 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.115189 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.121437 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.156100 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.165768 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.167666 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.182383 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.197114 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-config-data\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.197211 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp2p4\" (UniqueName: \"kubernetes.io/projected/c49c5191-38ab-4a6f-93c6-e9e477266d51-kube-api-access-hp2p4\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.197273 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c49c5191-38ab-4a6f-93c6-e9e477266d51-logs\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.197310 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.215002 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.216946 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.223403 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.262587 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.286194 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301581 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301675 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp2p4\" (UniqueName: \"kubernetes.io/projected/c49c5191-38ab-4a6f-93c6-e9e477266d51-kube-api-access-hp2p4\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301719 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh2sk\" (UniqueName: \"kubernetes.io/projected/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-kube-api-access-kh2sk\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301740 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhprm\" (UniqueName: \"kubernetes.io/projected/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-kube-api-access-nhprm\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301761 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301793 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c49c5191-38ab-4a6f-93c6-e9e477266d51-logs\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301810 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-logs\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301837 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-config-data\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 
crc kubenswrapper[4863]: I1205 08:14:20.301859 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301888 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-config-data\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.301924 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-config-data\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.304461 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c49c5191-38ab-4a6f-93c6-e9e477266d51-logs\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.311791 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-config-data\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.312268 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.358957 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp2p4\" (UniqueName: \"kubernetes.io/projected/c49c5191-38ab-4a6f-93c6-e9e477266d51-kube-api-access-hp2p4\") pod \"nova-api-0\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.403592 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-config-data\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.403677 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.403772 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh2sk\" (UniqueName: \"kubernetes.io/projected/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-kube-api-access-kh2sk\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc 
kubenswrapper[4863]: I1205 08:14:20.403810 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhprm\" (UniqueName: \"kubernetes.io/projected/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-kube-api-access-nhprm\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.403826 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.403850 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-logs\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.403891 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-config-data\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.405231 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-logs\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.407862 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-config-data\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.408129 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-config-data\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.409263 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.410407 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.425030 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhprm\" (UniqueName: \"kubernetes.io/projected/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-kube-api-access-nhprm\") pod \"nova-metadata-0\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " pod="openstack/nova-metadata-0" Dec 05 
08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.425707 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh2sk\" (UniqueName: \"kubernetes.io/projected/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-kube-api-access-kh2sk\") pod \"nova-scheduler-0\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.466680 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.510371 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.546806 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.628306 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e" path="/var/lib/kubelet/pods/3e192b3a-9b75-4c7b-99dc-3ca3832cbf7e/volumes" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.629036 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47577fd8-d815-45c1-b3b5-c2f5fa260506" path="/var/lib/kubelet/pods/47577fd8-d815-45c1-b3b5-c2f5fa260506/volumes" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.629684 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d63a4154-26ff-4261-aa77-2ab1455f18ba" path="/var/lib/kubelet/pods/d63a4154-26ff-4261-aa77-2ab1455f18ba/volumes" Dec 05 08:14:20 crc kubenswrapper[4863]: I1205 08:14:20.913040 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.018743 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:14:21 crc kubenswrapper[4863]: W1205 08:14:21.020735 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b7f6de9_2f45_4b71_a83a_22c96efddbbe.slice/crio-6a71737c8e96d55f8e12c2b86bbf184e6e7bd2ab469a19083e808fb84beed741 WatchSource:0}: Error finding container 6a71737c8e96d55f8e12c2b86bbf184e6e7bd2ab469a19083e808fb84beed741: Status 404 returned error can't find the container with id 6a71737c8e96d55f8e12c2b86bbf184e6e7bd2ab469a19083e808fb84beed741 Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.096578 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:14:21 crc kubenswrapper[4863]: W1205 08:14:21.107181 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd82fa16f_21cf_4b96_a99c_4e4d194d27cb.slice/crio-df3bbac69ef1384c966daac06c749e5541b621df58854e08818a56e4b545ac57 WatchSource:0}: Error finding container df3bbac69ef1384c966daac06c749e5541b621df58854e08818a56e4b545ac57: Status 404 returned error can't find the container with id df3bbac69ef1384c966daac06c749e5541b621df58854e08818a56e4b545ac57 Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.736360 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d82fa16f-21cf-4b96-a99c-4e4d194d27cb","Type":"ContainerStarted","Data":"a5169e86500186b3f15dbf894a2ceac8b0467fe904bb600804be8f15fde76d9f"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.736659 4863 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d82fa16f-21cf-4b96-a99c-4e4d194d27cb","Type":"ContainerStarted","Data":"e2979e6bbbac68da19e0c28b79ca66939763bca7ef9cedf774aaaed85444769d"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.736673 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d82fa16f-21cf-4b96-a99c-4e4d194d27cb","Type":"ContainerStarted","Data":"df3bbac69ef1384c966daac06c749e5541b621df58854e08818a56e4b545ac57"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.740135 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c49c5191-38ab-4a6f-93c6-e9e477266d51","Type":"ContainerStarted","Data":"681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.740195 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c49c5191-38ab-4a6f-93c6-e9e477266d51","Type":"ContainerStarted","Data":"033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.740208 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c49c5191-38ab-4a6f-93c6-e9e477266d51","Type":"ContainerStarted","Data":"092afbc32f8a3a3e69b8ae911661a8a37a4e3d4bbb6cff0064b7814269947bd7"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.746526 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7f6de9-2f45-4b71-a83a-22c96efddbbe","Type":"ContainerStarted","Data":"58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.746566 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7f6de9-2f45-4b71-a83a-22c96efddbbe","Type":"ContainerStarted","Data":"6a71737c8e96d55f8e12c2b86bbf184e6e7bd2ab469a19083e808fb84beed741"} Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.765520 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.7654960069999999 podStartE2EDuration="1.765496007s" podCreationTimestamp="2025-12-05 08:14:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:14:21.760537947 +0000 UTC m=+5289.486534997" watchObservedRunningTime="2025-12-05 08:14:21.765496007 +0000 UTC m=+5289.491493047" Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.789877 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.7898590890000001 podStartE2EDuration="1.789859089s" podCreationTimestamp="2025-12-05 08:14:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:14:21.780243165 +0000 UTC m=+5289.506240205" watchObservedRunningTime="2025-12-05 08:14:21.789859089 +0000 UTC m=+5289.515856129" Dec 05 08:14:21 crc kubenswrapper[4863]: I1205 08:14:21.809247 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.809224869 podStartE2EDuration="1.809224869s" podCreationTimestamp="2025-12-05 08:14:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 
08:14:21.802133056 +0000 UTC m=+5289.528130116" watchObservedRunningTime="2025-12-05 08:14:21.809224869 +0000 UTC m=+5289.535221919" Dec 05 08:14:25 crc kubenswrapper[4863]: I1205 08:14:25.510897 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 08:14:25 crc kubenswrapper[4863]: I1205 08:14:25.548022 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:14:25 crc kubenswrapper[4863]: I1205 08:14:25.548096 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:14:30 crc kubenswrapper[4863]: I1205 08:14:30.467876 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:14:30 crc kubenswrapper[4863]: I1205 08:14:30.468692 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:14:30 crc kubenswrapper[4863]: I1205 08:14:30.511214 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 08:14:30 crc kubenswrapper[4863]: I1205 08:14:30.539499 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 08:14:30 crc kubenswrapper[4863]: I1205 08:14:30.547977 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:14:30 crc kubenswrapper[4863]: I1205 08:14:30.548017 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:14:30 crc kubenswrapper[4863]: I1205 08:14:30.911306 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 08:14:31 crc kubenswrapper[4863]: I1205 08:14:31.549624 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.64:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:31 crc kubenswrapper[4863]: I1205 08:14:31.549650 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.64:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:31 crc kubenswrapper[4863]: I1205 08:14:31.637822 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:31 crc kubenswrapper[4863]: I1205 08:14:31.637858 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:14:32 crc kubenswrapper[4863]: I1205 08:14:32.609377 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:14:32 crc kubenswrapper[4863]: E1205 08:14:32.609700 4863 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.471069 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.473397 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.473965 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.474008 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.476830 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.478322 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.549547 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.550918 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.551605 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.688258 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4"] Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.689947 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.703358 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4"] Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.807593 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-config\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.807641 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-dns-svc\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.807857 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.808015 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bswr\" (UniqueName: \"kubernetes.io/projected/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-kube-api-access-5bswr\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.808231 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.909963 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-config\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.910021 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-dns-svc\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.910049 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.910086 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-5bswr\" (UniqueName: \"kubernetes.io/projected/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-kube-api-access-5bswr\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.910137 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.911245 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.912010 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-config\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.913234 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.913601 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-dns-svc\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.937662 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bswr\" (UniqueName: \"kubernetes.io/projected/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-kube-api-access-5bswr\") pod \"dnsmasq-dns-5f7d6fbf6f-jvmj4\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:40 crc kubenswrapper[4863]: I1205 08:14:40.942623 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 08:14:41 crc kubenswrapper[4863]: I1205 08:14:41.025993 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:41 crc kubenswrapper[4863]: I1205 08:14:41.509415 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4"] Dec 05 08:14:41 crc kubenswrapper[4863]: W1205 08:14:41.514067 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb439f3b_1a4e_4dbc_977d_07e95b8b9ba3.slice/crio-4e2107b5fde15e7a1d1434eb4a3068e7089adf42149aca2e581ae177504a1339 WatchSource:0}: Error finding container 4e2107b5fde15e7a1d1434eb4a3068e7089adf42149aca2e581ae177504a1339: Status 404 returned error can't find the container with id 4e2107b5fde15e7a1d1434eb4a3068e7089adf42149aca2e581ae177504a1339 Dec 05 08:14:41 crc kubenswrapper[4863]: I1205 08:14:41.947683 4863 generic.go:334] "Generic (PLEG): container finished" podID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerID="82d1b2c965f21db2c2d9d36b828bb5b20b7d5b0e2a67d978e1cd2a1f76e79eb7" exitCode=0 Dec 05 08:14:41 crc kubenswrapper[4863]: I1205 08:14:41.947723 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" event={"ID":"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3","Type":"ContainerDied","Data":"82d1b2c965f21db2c2d9d36b828bb5b20b7d5b0e2a67d978e1cd2a1f76e79eb7"} Dec 05 08:14:41 crc kubenswrapper[4863]: I1205 08:14:41.948018 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" event={"ID":"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3","Type":"ContainerStarted","Data":"4e2107b5fde15e7a1d1434eb4a3068e7089adf42149aca2e581ae177504a1339"} Dec 05 08:14:42 crc kubenswrapper[4863]: I1205 08:14:42.956183 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" event={"ID":"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3","Type":"ContainerStarted","Data":"c8cb47a9db48fce7e49e670296ce284984c789ff835aae3283a4804e49dff802"} Dec 05 08:14:42 crc kubenswrapper[4863]: I1205 08:14:42.973409 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" podStartSLOduration=2.973394412 podStartE2EDuration="2.973394412s" podCreationTimestamp="2025-12-05 08:14:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:14:42.973074535 +0000 UTC m=+5310.699071575" watchObservedRunningTime="2025-12-05 08:14:42.973394412 +0000 UTC m=+5310.699391452" Dec 05 08:14:43 crc kubenswrapper[4863]: I1205 08:14:43.965760 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:46 crc kubenswrapper[4863]: I1205 08:14:46.603779 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:14:46 crc kubenswrapper[4863]: E1205 08:14:46.606182 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.028853 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.135701 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bdcdccdd9-p49k6"] Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.136040 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" podUID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerName="dnsmasq-dns" containerID="cri-o://f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8" gracePeriod=10 Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.676570 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.692699 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-dns-svc\") pod \"5d7f256d-7571-494f-87d9-8e750ca22ba9\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.692849 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq4ws\" (UniqueName: \"kubernetes.io/projected/5d7f256d-7571-494f-87d9-8e750ca22ba9-kube-api-access-rq4ws\") pod \"5d7f256d-7571-494f-87d9-8e750ca22ba9\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.693002 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-sb\") pod \"5d7f256d-7571-494f-87d9-8e750ca22ba9\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.693043 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-nb\") pod \"5d7f256d-7571-494f-87d9-8e750ca22ba9\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.693064 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-config\") pod \"5d7f256d-7571-494f-87d9-8e750ca22ba9\" (UID: \"5d7f256d-7571-494f-87d9-8e750ca22ba9\") " Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.700942 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d7f256d-7571-494f-87d9-8e750ca22ba9-kube-api-access-rq4ws" (OuterVolumeSpecName: "kube-api-access-rq4ws") pod "5d7f256d-7571-494f-87d9-8e750ca22ba9" (UID: "5d7f256d-7571-494f-87d9-8e750ca22ba9"). InnerVolumeSpecName "kube-api-access-rq4ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.750878 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5d7f256d-7571-494f-87d9-8e750ca22ba9" (UID: "5d7f256d-7571-494f-87d9-8e750ca22ba9"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.759829 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5d7f256d-7571-494f-87d9-8e750ca22ba9" (UID: "5d7f256d-7571-494f-87d9-8e750ca22ba9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.775226 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5d7f256d-7571-494f-87d9-8e750ca22ba9" (UID: "5d7f256d-7571-494f-87d9-8e750ca22ba9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.776083 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-config" (OuterVolumeSpecName: "config") pod "5d7f256d-7571-494f-87d9-8e750ca22ba9" (UID: "5d7f256d-7571-494f-87d9-8e750ca22ba9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.795348 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.795381 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.795394 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.795404 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d7f256d-7571-494f-87d9-8e750ca22ba9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:51 crc kubenswrapper[4863]: I1205 08:14:51.795416 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq4ws\" (UniqueName: \"kubernetes.io/projected/5d7f256d-7571-494f-87d9-8e750ca22ba9-kube-api-access-rq4ws\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.051187 4863 generic.go:334] "Generic (PLEG): container finished" podID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerID="f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8" exitCode=0 Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.051286 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" event={"ID":"5d7f256d-7571-494f-87d9-8e750ca22ba9","Type":"ContainerDied","Data":"f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8"} Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.051593 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" 
event={"ID":"5d7f256d-7571-494f-87d9-8e750ca22ba9","Type":"ContainerDied","Data":"2c01240d1cf36ad09a43b868c6dacedb428b58f8d044108693183d5f4da5dbfb"} Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.051621 4863 scope.go:117] "RemoveContainer" containerID="f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.051315 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bdcdccdd9-p49k6" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.109572 4863 scope.go:117] "RemoveContainer" containerID="a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.127244 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bdcdccdd9-p49k6"] Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.144681 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bdcdccdd9-p49k6"] Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.151546 4863 scope.go:117] "RemoveContainer" containerID="f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8" Dec 05 08:14:52 crc kubenswrapper[4863]: E1205 08:14:52.152345 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8\": container with ID starting with f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8 not found: ID does not exist" containerID="f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.152396 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8"} err="failed to get container status \"f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8\": rpc error: code = NotFound desc = could not find container \"f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8\": container with ID starting with f966d4c54005892606034c2e299728698fcd4d17e43f01efe2e453e222fe77b8 not found: ID does not exist" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.152428 4863 scope.go:117] "RemoveContainer" containerID="a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421" Dec 05 08:14:52 crc kubenswrapper[4863]: E1205 08:14:52.153459 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421\": container with ID starting with a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421 not found: ID does not exist" containerID="a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 08:14:52.153576 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421"} err="failed to get container status \"a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421\": rpc error: code = NotFound desc = could not find container \"a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421\": container with ID starting with a86deead25521766b2f1c11545ab0386bd1da72df8d02af1acf9f64a0bc0d421 not found: ID does not exist" Dec 05 08:14:52 crc kubenswrapper[4863]: I1205 
08:14:52.614311 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d7f256d-7571-494f-87d9-8e750ca22ba9" path="/var/lib/kubelet/pods/5d7f256d-7571-494f-87d9-8e750ca22ba9/volumes" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.336501 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-78dd7"] Dec 05 08:14:53 crc kubenswrapper[4863]: E1205 08:14:53.337167 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerName="init" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.337179 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerName="init" Dec 05 08:14:53 crc kubenswrapper[4863]: E1205 08:14:53.337217 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerName="dnsmasq-dns" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.337223 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerName="dnsmasq-dns" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.337384 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d7f256d-7571-494f-87d9-8e750ca22ba9" containerName="dnsmasq-dns" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.337974 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.349570 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-78dd7"] Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.428576 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-operator-scripts\") pod \"cinder-db-create-78dd7\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.428631 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stk8t\" (UniqueName: \"kubernetes.io/projected/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-kube-api-access-stk8t\") pod \"cinder-db-create-78dd7\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.441894 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-e11c-account-create-update-q5ps9"] Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.443124 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.448063 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.452446 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e11c-account-create-update-q5ps9"] Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.530967 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-operator-scripts\") pod \"cinder-e11c-account-create-update-q5ps9\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.531045 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nhqd\" (UniqueName: \"kubernetes.io/projected/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-kube-api-access-4nhqd\") pod \"cinder-e11c-account-create-update-q5ps9\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.531226 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-operator-scripts\") pod \"cinder-db-create-78dd7\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.531293 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stk8t\" (UniqueName: \"kubernetes.io/projected/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-kube-api-access-stk8t\") pod \"cinder-db-create-78dd7\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.532189 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-operator-scripts\") pod \"cinder-db-create-78dd7\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.552322 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stk8t\" (UniqueName: \"kubernetes.io/projected/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-kube-api-access-stk8t\") pod \"cinder-db-create-78dd7\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.633513 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-operator-scripts\") pod \"cinder-e11c-account-create-update-q5ps9\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.633615 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nhqd\" (UniqueName: \"kubernetes.io/projected/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-kube-api-access-4nhqd\") pod 
\"cinder-e11c-account-create-update-q5ps9\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.634986 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-operator-scripts\") pod \"cinder-e11c-account-create-update-q5ps9\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.648362 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nhqd\" (UniqueName: \"kubernetes.io/projected/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-kube-api-access-4nhqd\") pod \"cinder-e11c-account-create-update-q5ps9\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.667572 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:53 crc kubenswrapper[4863]: I1205 08:14:53.759993 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:54 crc kubenswrapper[4863]: I1205 08:14:54.145753 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-78dd7"] Dec 05 08:14:54 crc kubenswrapper[4863]: W1205 08:14:54.258264 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f1f484b_c1ae_47dd_9a9d_cea73101d2cf.slice/crio-e59d20fe6a4672787a3b97652aa4096de4b2162bf3f7c905b087e06c482e5f95 WatchSource:0}: Error finding container e59d20fe6a4672787a3b97652aa4096de4b2162bf3f7c905b087e06c482e5f95: Status 404 returned error can't find the container with id e59d20fe6a4672787a3b97652aa4096de4b2162bf3f7c905b087e06c482e5f95 Dec 05 08:14:54 crc kubenswrapper[4863]: I1205 08:14:54.258453 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e11c-account-create-update-q5ps9"] Dec 05 08:14:55 crc kubenswrapper[4863]: I1205 08:14:55.081953 4863 generic.go:334] "Generic (PLEG): container finished" podID="3f1f484b-c1ae-47dd-9a9d-cea73101d2cf" containerID="9432b0dcfd79c99d718106e63060290b5abc8375497d9fea1932f1369c81bbbb" exitCode=0 Dec 05 08:14:55 crc kubenswrapper[4863]: I1205 08:14:55.082025 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e11c-account-create-update-q5ps9" event={"ID":"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf","Type":"ContainerDied","Data":"9432b0dcfd79c99d718106e63060290b5abc8375497d9fea1932f1369c81bbbb"} Dec 05 08:14:55 crc kubenswrapper[4863]: I1205 08:14:55.082376 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e11c-account-create-update-q5ps9" event={"ID":"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf","Type":"ContainerStarted","Data":"e59d20fe6a4672787a3b97652aa4096de4b2162bf3f7c905b087e06c482e5f95"} Dec 05 08:14:55 crc kubenswrapper[4863]: I1205 08:14:55.085229 4863 generic.go:334] "Generic (PLEG): container finished" podID="a035e6d7-f5f5-45ee-9636-dc4c3c33456b" containerID="d9b29641eaee6962c47db3fa5a5bb50cea33b34e2a26b82288ebe7c355e96181" exitCode=0 Dec 05 08:14:55 crc kubenswrapper[4863]: I1205 08:14:55.085261 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-78dd7" 
event={"ID":"a035e6d7-f5f5-45ee-9636-dc4c3c33456b","Type":"ContainerDied","Data":"d9b29641eaee6962c47db3fa5a5bb50cea33b34e2a26b82288ebe7c355e96181"} Dec 05 08:14:55 crc kubenswrapper[4863]: I1205 08:14:55.085282 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-78dd7" event={"ID":"a035e6d7-f5f5-45ee-9636-dc4c3c33456b","Type":"ContainerStarted","Data":"2e44b3da22a3c3972da438875ff1da64176ac595738ae6a5bcf32e33d1ca7e34"} Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.550896 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.556835 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.685962 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stk8t\" (UniqueName: \"kubernetes.io/projected/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-kube-api-access-stk8t\") pod \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.686273 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-operator-scripts\") pod \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\" (UID: \"a035e6d7-f5f5-45ee-9636-dc4c3c33456b\") " Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.686885 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nhqd\" (UniqueName: \"kubernetes.io/projected/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-kube-api-access-4nhqd\") pod \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.687048 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-operator-scripts\") pod \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\" (UID: \"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf\") " Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.686934 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a035e6d7-f5f5-45ee-9636-dc4c3c33456b" (UID: "a035e6d7-f5f5-45ee-9636-dc4c3c33456b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.687742 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3f1f484b-c1ae-47dd-9a9d-cea73101d2cf" (UID: "3f1f484b-c1ae-47dd-9a9d-cea73101d2cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.693216 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-kube-api-access-4nhqd" (OuterVolumeSpecName: "kube-api-access-4nhqd") pod "3f1f484b-c1ae-47dd-9a9d-cea73101d2cf" (UID: "3f1f484b-c1ae-47dd-9a9d-cea73101d2cf"). 
InnerVolumeSpecName "kube-api-access-4nhqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.695178 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-kube-api-access-stk8t" (OuterVolumeSpecName: "kube-api-access-stk8t") pod "a035e6d7-f5f5-45ee-9636-dc4c3c33456b" (UID: "a035e6d7-f5f5-45ee-9636-dc4c3c33456b"). InnerVolumeSpecName "kube-api-access-stk8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.789504 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.789559 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stk8t\" (UniqueName: \"kubernetes.io/projected/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-kube-api-access-stk8t\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.789578 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a035e6d7-f5f5-45ee-9636-dc4c3c33456b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:56 crc kubenswrapper[4863]: I1205 08:14:56.789595 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nhqd\" (UniqueName: \"kubernetes.io/projected/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf-kube-api-access-4nhqd\") on node \"crc\" DevicePath \"\"" Dec 05 08:14:57 crc kubenswrapper[4863]: I1205 08:14:57.106976 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-78dd7" event={"ID":"a035e6d7-f5f5-45ee-9636-dc4c3c33456b","Type":"ContainerDied","Data":"2e44b3da22a3c3972da438875ff1da64176ac595738ae6a5bcf32e33d1ca7e34"} Dec 05 08:14:57 crc kubenswrapper[4863]: I1205 08:14:57.107003 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78dd7" Dec 05 08:14:57 crc kubenswrapper[4863]: I1205 08:14:57.107014 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e44b3da22a3c3972da438875ff1da64176ac595738ae6a5bcf32e33d1ca7e34" Dec 05 08:14:57 crc kubenswrapper[4863]: I1205 08:14:57.109376 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e11c-account-create-update-q5ps9" event={"ID":"3f1f484b-c1ae-47dd-9a9d-cea73101d2cf","Type":"ContainerDied","Data":"e59d20fe6a4672787a3b97652aa4096de4b2162bf3f7c905b087e06c482e5f95"} Dec 05 08:14:57 crc kubenswrapper[4863]: I1205 08:14:57.109418 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e59d20fe6a4672787a3b97652aa4096de4b2162bf3f7c905b087e06c482e5f95" Dec 05 08:14:57 crc kubenswrapper[4863]: I1205 08:14:57.109426 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e11c-account-create-update-q5ps9" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.638367 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-rlnq8"] Dec 05 08:14:58 crc kubenswrapper[4863]: E1205 08:14:58.639033 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a035e6d7-f5f5-45ee-9636-dc4c3c33456b" containerName="mariadb-database-create" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.639058 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a035e6d7-f5f5-45ee-9636-dc4c3c33456b" containerName="mariadb-database-create" Dec 05 08:14:58 crc kubenswrapper[4863]: E1205 08:14:58.639094 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f1f484b-c1ae-47dd-9a9d-cea73101d2cf" containerName="mariadb-account-create-update" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.639105 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f1f484b-c1ae-47dd-9a9d-cea73101d2cf" containerName="mariadb-account-create-update" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.639375 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f1f484b-c1ae-47dd-9a9d-cea73101d2cf" containerName="mariadb-account-create-update" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.639399 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a035e6d7-f5f5-45ee-9636-dc4c3c33456b" containerName="mariadb-database-create" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.640336 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.642645 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-rgmb7" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.642815 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.643578 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.659160 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rlnq8"] Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.828631 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-db-sync-config-data\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.828737 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-combined-ca-bundle\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.828816 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-scripts\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 
08:14:58.828839 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d9ff5445-d86b-48bb-b323-9911e1ba1a66-etc-machine-id\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.828884 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l5fv\" (UniqueName: \"kubernetes.io/projected/d9ff5445-d86b-48bb-b323-9911e1ba1a66-kube-api-access-9l5fv\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.828931 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-config-data\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.930892 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-config-data\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.931032 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-db-sync-config-data\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.931071 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-combined-ca-bundle\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.931149 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-scripts\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.931178 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d9ff5445-d86b-48bb-b323-9911e1ba1a66-etc-machine-id\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.931223 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l5fv\" (UniqueName: \"kubernetes.io/projected/d9ff5445-d86b-48bb-b323-9911e1ba1a66-kube-api-access-9l5fv\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.931286 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/d9ff5445-d86b-48bb-b323-9911e1ba1a66-etc-machine-id\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.937037 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-config-data\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.938921 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-db-sync-config-data\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.939331 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-scripts\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.943710 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-combined-ca-bundle\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.950961 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l5fv\" (UniqueName: \"kubernetes.io/projected/d9ff5445-d86b-48bb-b323-9911e1ba1a66-kube-api-access-9l5fv\") pod \"cinder-db-sync-rlnq8\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:58 crc kubenswrapper[4863]: I1205 08:14:58.974161 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:14:59 crc kubenswrapper[4863]: I1205 08:14:59.430206 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rlnq8"] Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.144654 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rlnq8" event={"ID":"d9ff5445-d86b-48bb-b323-9911e1ba1a66","Type":"ContainerStarted","Data":"98b1071e93d17376b9860e6dfd7ba6a8b9136d3b70cdf65f1ea9bad438287f52"} Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.149111 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m"] Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.150681 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.152677 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.153371 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.178318 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m"] Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.257088 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-config-volume\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.257263 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-secret-volume\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.257345 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9lmh\" (UniqueName: \"kubernetes.io/projected/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-kube-api-access-d9lmh\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.359555 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9lmh\" (UniqueName: \"kubernetes.io/projected/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-kube-api-access-d9lmh\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.359681 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-config-volume\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.359753 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-secret-volume\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.360681 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-config-volume\") pod 
\"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.366086 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-secret-volume\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.377112 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9lmh\" (UniqueName: \"kubernetes.io/projected/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-kube-api-access-d9lmh\") pod \"collect-profiles-29415375-ght5m\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.476533 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:00 crc kubenswrapper[4863]: I1205 08:15:00.606398 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:15:00 crc kubenswrapper[4863]: E1205 08:15:00.606838 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:15:01 crc kubenswrapper[4863]: I1205 08:15:00.922172 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m"] Dec 05 08:15:01 crc kubenswrapper[4863]: I1205 08:15:01.183304 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" event={"ID":"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69","Type":"ContainerStarted","Data":"6f82281425197d3e5c26f91b2e7f5025d5d511bee4287b0075c227db98d77b74"} Dec 05 08:15:02 crc kubenswrapper[4863]: I1205 08:15:02.194766 4863 generic.go:334] "Generic (PLEG): container finished" podID="fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" containerID="56ceaf9896b6a926c11e5239fd9e4188392784c271aaee8ee6ffcc6193508eb6" exitCode=0 Dec 05 08:15:02 crc kubenswrapper[4863]: I1205 08:15:02.194854 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" event={"ID":"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69","Type":"ContainerDied","Data":"56ceaf9896b6a926c11e5239fd9e4188392784c271aaee8ee6ffcc6193508eb6"} Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.554864 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.640003 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-secret-volume\") pod \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.640227 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-config-volume\") pod \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.640341 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9lmh\" (UniqueName: \"kubernetes.io/projected/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-kube-api-access-d9lmh\") pod \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\" (UID: \"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69\") " Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.641415 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-config-volume" (OuterVolumeSpecName: "config-volume") pod "fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" (UID: "fe2b1bc9-6eeb-4e15-861a-8f5401c69e69"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.645434 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" (UID: "fe2b1bc9-6eeb-4e15-861a-8f5401c69e69"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.645947 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-kube-api-access-d9lmh" (OuterVolumeSpecName: "kube-api-access-d9lmh") pod "fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" (UID: "fe2b1bc9-6eeb-4e15-861a-8f5401c69e69"). InnerVolumeSpecName "kube-api-access-d9lmh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.743143 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.743186 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9lmh\" (UniqueName: \"kubernetes.io/projected/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-kube-api-access-d9lmh\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:03 crc kubenswrapper[4863]: I1205 08:15:03.743201 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:04 crc kubenswrapper[4863]: I1205 08:15:04.215493 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" event={"ID":"fe2b1bc9-6eeb-4e15-861a-8f5401c69e69","Type":"ContainerDied","Data":"6f82281425197d3e5c26f91b2e7f5025d5d511bee4287b0075c227db98d77b74"} Dec 05 08:15:04 crc kubenswrapper[4863]: I1205 08:15:04.215529 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f82281425197d3e5c26f91b2e7f5025d5d511bee4287b0075c227db98d77b74" Dec 05 08:15:04 crc kubenswrapper[4863]: I1205 08:15:04.215577 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m" Dec 05 08:15:04 crc kubenswrapper[4863]: I1205 08:15:04.629884 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p"] Dec 05 08:15:04 crc kubenswrapper[4863]: I1205 08:15:04.639937 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415330-dq74p"] Dec 05 08:15:06 crc kubenswrapper[4863]: I1205 08:15:06.613441 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="487cc8d4-cf82-4675-810c-7505a5a9ed13" path="/var/lib/kubelet/pods/487cc8d4-cf82-4675-810c-7505a5a9ed13/volumes" Dec 05 08:15:12 crc kubenswrapper[4863]: I1205 08:15:12.618283 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:15:19 crc kubenswrapper[4863]: I1205 08:15:19.377200 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"eee869fb7e6f374e18166f19582d12b2fb32088b174d35edb03957c7b461d2cb"} Dec 05 08:15:20 crc kubenswrapper[4863]: I1205 08:15:20.391008 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rlnq8" event={"ID":"d9ff5445-d86b-48bb-b323-9911e1ba1a66","Type":"ContainerStarted","Data":"78e373ddd04cfa49e667ff50d89979b804d9946e14846e58dea72b87a0f2a7cb"} Dec 05 08:15:20 crc kubenswrapper[4863]: I1205 08:15:20.418320 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-rlnq8" podStartSLOduration=2.978813043 podStartE2EDuration="22.418293757s" podCreationTimestamp="2025-12-05 08:14:58 +0000 UTC" firstStartedPulling="2025-12-05 08:14:59.436619192 +0000 UTC m=+5327.162616232" lastFinishedPulling="2025-12-05 08:15:18.876099906 +0000 UTC 
m=+5346.602096946" observedRunningTime="2025-12-05 08:15:20.416639397 +0000 UTC m=+5348.142636507" watchObservedRunningTime="2025-12-05 08:15:20.418293757 +0000 UTC m=+5348.144290817" Dec 05 08:15:22 crc kubenswrapper[4863]: I1205 08:15:22.413298 4863 generic.go:334] "Generic (PLEG): container finished" podID="d9ff5445-d86b-48bb-b323-9911e1ba1a66" containerID="78e373ddd04cfa49e667ff50d89979b804d9946e14846e58dea72b87a0f2a7cb" exitCode=0 Dec 05 08:15:22 crc kubenswrapper[4863]: I1205 08:15:22.413416 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rlnq8" event={"ID":"d9ff5445-d86b-48bb-b323-9911e1ba1a66","Type":"ContainerDied","Data":"78e373ddd04cfa49e667ff50d89979b804d9946e14846e58dea72b87a0f2a7cb"} Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.714788 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.858389 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-db-sync-config-data\") pod \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.858838 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-combined-ca-bundle\") pod \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.858915 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d9ff5445-d86b-48bb-b323-9911e1ba1a66-etc-machine-id\") pod \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.858940 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-config-data\") pod \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.859007 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9ff5445-d86b-48bb-b323-9911e1ba1a66-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d9ff5445-d86b-48bb-b323-9911e1ba1a66" (UID: "d9ff5445-d86b-48bb-b323-9911e1ba1a66"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.859026 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-scripts\") pod \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.859099 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9l5fv\" (UniqueName: \"kubernetes.io/projected/d9ff5445-d86b-48bb-b323-9911e1ba1a66-kube-api-access-9l5fv\") pod \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\" (UID: \"d9ff5445-d86b-48bb-b323-9911e1ba1a66\") " Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.859568 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d9ff5445-d86b-48bb-b323-9911e1ba1a66-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.864446 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-scripts" (OuterVolumeSpecName: "scripts") pod "d9ff5445-d86b-48bb-b323-9911e1ba1a66" (UID: "d9ff5445-d86b-48bb-b323-9911e1ba1a66"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.865116 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d9ff5445-d86b-48bb-b323-9911e1ba1a66" (UID: "d9ff5445-d86b-48bb-b323-9911e1ba1a66"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.878643 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9ff5445-d86b-48bb-b323-9911e1ba1a66-kube-api-access-9l5fv" (OuterVolumeSpecName: "kube-api-access-9l5fv") pod "d9ff5445-d86b-48bb-b323-9911e1ba1a66" (UID: "d9ff5445-d86b-48bb-b323-9911e1ba1a66"). InnerVolumeSpecName "kube-api-access-9l5fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.895807 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9ff5445-d86b-48bb-b323-9911e1ba1a66" (UID: "d9ff5445-d86b-48bb-b323-9911e1ba1a66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.899273 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-config-data" (OuterVolumeSpecName: "config-data") pod "d9ff5445-d86b-48bb-b323-9911e1ba1a66" (UID: "d9ff5445-d86b-48bb-b323-9911e1ba1a66"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.960752 4863 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.960791 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.960821 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.960832 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9ff5445-d86b-48bb-b323-9911e1ba1a66-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:23 crc kubenswrapper[4863]: I1205 08:15:23.960842 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9l5fv\" (UniqueName: \"kubernetes.io/projected/d9ff5445-d86b-48bb-b323-9911e1ba1a66-kube-api-access-9l5fv\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.431175 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rlnq8" event={"ID":"d9ff5445-d86b-48bb-b323-9911e1ba1a66","Type":"ContainerDied","Data":"98b1071e93d17376b9860e6dfd7ba6a8b9136d3b70cdf65f1ea9bad438287f52"} Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.431436 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98b1071e93d17376b9860e6dfd7ba6a8b9136d3b70cdf65f1ea9bad438287f52" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.431259 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-rlnq8" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.721710 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-db886c5f9-4wqdt"] Dec 05 08:15:24 crc kubenswrapper[4863]: E1205 08:15:24.722227 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" containerName="collect-profiles" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.722242 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" containerName="collect-profiles" Dec 05 08:15:24 crc kubenswrapper[4863]: E1205 08:15:24.722284 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9ff5445-d86b-48bb-b323-9911e1ba1a66" containerName="cinder-db-sync" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.722293 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9ff5445-d86b-48bb-b323-9911e1ba1a66" containerName="cinder-db-sync" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.722533 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9ff5445-d86b-48bb-b323-9911e1ba1a66" containerName="cinder-db-sync" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.722576 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" containerName="collect-profiles" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.723806 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.743848 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-db886c5f9-4wqdt"] Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.824049 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.825608 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.827593 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.827942 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-rgmb7" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.828096 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.828239 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.845683 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.878017 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmqz4\" (UniqueName: \"kubernetes.io/projected/35ea1b31-9203-4ce1-9846-a27fb891bed8-kube-api-access-dmqz4\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.878083 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-nb\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.878163 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-dns-svc\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.878189 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-sb\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.878220 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-config\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.979572 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.979905 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-config\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: 
\"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.979949 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-logs\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.979977 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d9nn\" (UniqueName: \"kubernetes.io/projected/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-kube-api-access-4d9nn\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.979998 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-scripts\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.980050 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmqz4\" (UniqueName: \"kubernetes.io/projected/35ea1b31-9203-4ce1-9846-a27fb891bed8-kube-api-access-dmqz4\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.980094 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-nb\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.980133 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.980183 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.980223 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.980260 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-dns-svc\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.980282 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-sb\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.981169 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-config\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.981204 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-sb\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.981887 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-nb\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:24 crc kubenswrapper[4863]: I1205 08:15:24.981990 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-dns-svc\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.007709 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmqz4\" (UniqueName: \"kubernetes.io/projected/35ea1b31-9203-4ce1-9846-a27fb891bed8-kube-api-access-dmqz4\") pod \"dnsmasq-dns-db886c5f9-4wqdt\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.060065 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081409 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081484 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081522 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081560 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081590 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-logs\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081615 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d9nn\" (UniqueName: \"kubernetes.io/projected/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-kube-api-access-4d9nn\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081634 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-scripts\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.081608 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.082376 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-logs\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.085397 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " 
pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.087305 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.088615 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-scripts\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.089256 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.104803 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d9nn\" (UniqueName: \"kubernetes.io/projected/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-kube-api-access-4d9nn\") pod \"cinder-api-0\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.145397 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.585244 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-db886c5f9-4wqdt"] Dec 05 08:15:25 crc kubenswrapper[4863]: W1205 08:15:25.595949 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35ea1b31_9203_4ce1_9846_a27fb891bed8.slice/crio-94cb01dd33ee5b7a3beaded3914ceacf1abc494c63953436801b208bb3c17e5c WatchSource:0}: Error finding container 94cb01dd33ee5b7a3beaded3914ceacf1abc494c63953436801b208bb3c17e5c: Status 404 returned error can't find the container with id 94cb01dd33ee5b7a3beaded3914ceacf1abc494c63953436801b208bb3c17e5c Dec 05 08:15:25 crc kubenswrapper[4863]: W1205 08:15:25.765866 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2a1d8b4_82aa_4b19_bd61_38074852cbb7.slice/crio-d8f84f6095e530d7e27cd092161ac0bb25f5eeee78d7fb10021ac1b191b3c24b WatchSource:0}: Error finding container d8f84f6095e530d7e27cd092161ac0bb25f5eeee78d7fb10021ac1b191b3c24b: Status 404 returned error can't find the container with id d8f84f6095e530d7e27cd092161ac0bb25f5eeee78d7fb10021ac1b191b3c24b Dec 05 08:15:25 crc kubenswrapper[4863]: I1205 08:15:25.768923 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:15:26 crc kubenswrapper[4863]: I1205 08:15:26.452066 4863 generic.go:334] "Generic (PLEG): container finished" podID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerID="2531af9f8fa9dae1bd0196eae7f61ecb7d589aeb855f9c505aa8a6bfee5b6cdb" exitCode=0 Dec 05 08:15:26 crc kubenswrapper[4863]: I1205 08:15:26.452462 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" event={"ID":"35ea1b31-9203-4ce1-9846-a27fb891bed8","Type":"ContainerDied","Data":"2531af9f8fa9dae1bd0196eae7f61ecb7d589aeb855f9c505aa8a6bfee5b6cdb"} Dec 05 
08:15:26 crc kubenswrapper[4863]: I1205 08:15:26.452542 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" event={"ID":"35ea1b31-9203-4ce1-9846-a27fb891bed8","Type":"ContainerStarted","Data":"94cb01dd33ee5b7a3beaded3914ceacf1abc494c63953436801b208bb3c17e5c"} Dec 05 08:15:26 crc kubenswrapper[4863]: I1205 08:15:26.453889 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a2a1d8b4-82aa-4b19-bd61-38074852cbb7","Type":"ContainerStarted","Data":"1d9bcef128b395537dc26a66c0e28bafbb811c27a48c6e61f930b559a3b524db"} Dec 05 08:15:26 crc kubenswrapper[4863]: I1205 08:15:26.453912 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a2a1d8b4-82aa-4b19-bd61-38074852cbb7","Type":"ContainerStarted","Data":"d8f84f6095e530d7e27cd092161ac0bb25f5eeee78d7fb10021ac1b191b3c24b"} Dec 05 08:15:27 crc kubenswrapper[4863]: I1205 08:15:27.466121 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a2a1d8b4-82aa-4b19-bd61-38074852cbb7","Type":"ContainerStarted","Data":"a16d831a6025bf2f98e64be7ee84fb6e1f9b33b8e3d36ac1aae1d28bc429bc6b"} Dec 05 08:15:27 crc kubenswrapper[4863]: I1205 08:15:27.466855 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 08:15:27 crc kubenswrapper[4863]: I1205 08:15:27.469810 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" event={"ID":"35ea1b31-9203-4ce1-9846-a27fb891bed8","Type":"ContainerStarted","Data":"717b46cbcaad57f87a99aa506b9db547cf39166ec9324df12b8d44a4d9021091"} Dec 05 08:15:27 crc kubenswrapper[4863]: I1205 08:15:27.469966 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:27 crc kubenswrapper[4863]: I1205 08:15:27.493526 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.4935037700000002 podStartE2EDuration="3.49350377s" podCreationTimestamp="2025-12-05 08:15:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:27.484980213 +0000 UTC m=+5355.210977273" watchObservedRunningTime="2025-12-05 08:15:27.49350377 +0000 UTC m=+5355.219500830" Dec 05 08:15:27 crc kubenswrapper[4863]: I1205 08:15:27.518882 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" podStartSLOduration=3.5188298639999998 podStartE2EDuration="3.518829864s" podCreationTimestamp="2025-12-05 08:15:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:27.506034144 +0000 UTC m=+5355.232031204" watchObservedRunningTime="2025-12-05 08:15:27.518829864 +0000 UTC m=+5355.244826924" Dec 05 08:15:34 crc kubenswrapper[4863]: I1205 08:15:34.500867 4863 scope.go:117] "RemoveContainer" containerID="22562f2697907e8928049cf9c8db8109a2ba4fe927a106fc74de99412ba4d663" Dec 05 08:15:35 crc kubenswrapper[4863]: I1205 08:15:35.062729 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:15:35 crc kubenswrapper[4863]: I1205 08:15:35.133480 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4"] Dec 05 08:15:35 crc kubenswrapper[4863]: 
I1205 08:15:35.133715 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerName="dnsmasq-dns" containerID="cri-o://c8cb47a9db48fce7e49e670296ce284984c789ff835aae3283a4804e49dff802" gracePeriod=10 Dec 05 08:15:36 crc kubenswrapper[4863]: I1205 08:15:36.027986 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.67:5353: connect: connection refused" Dec 05 08:15:36 crc kubenswrapper[4863]: I1205 08:15:36.584173 4863 generic.go:334] "Generic (PLEG): container finished" podID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerID="c8cb47a9db48fce7e49e670296ce284984c789ff835aae3283a4804e49dff802" exitCode=0 Dec 05 08:15:36 crc kubenswrapper[4863]: I1205 08:15:36.584228 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" event={"ID":"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3","Type":"ContainerDied","Data":"c8cb47a9db48fce7e49e670296ce284984c789ff835aae3283a4804e49dff802"} Dec 05 08:15:36 crc kubenswrapper[4863]: I1205 08:15:36.902120 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:15:36 crc kubenswrapper[4863]: I1205 08:15:36.989600 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:15:36 crc kubenswrapper[4863]: I1205 08:15:36.989851 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a3ecd8f0-9470-4b55-8657-2a1aba539e56" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5772bcb2a3e0c7889d635921717b3d759f58af26ee5d45aee8d159462ca3cc5e" gracePeriod=30 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.012000 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.012459 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4b7f6de9-2f45-4b71-a83a-22c96efddbbe" containerName="nova-scheduler-scheduler" containerID="cri-o://58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8" gracePeriod=30 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.027530 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.027796 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-log" containerID="cri-o://033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf" gracePeriod=30 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.028246 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-api" containerID="cri-o://681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b" gracePeriod=30 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.039648 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-dns-svc\") pod 
\"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.039774 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-sb\") pod \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.039824 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bswr\" (UniqueName: \"kubernetes.io/projected/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-kube-api-access-5bswr\") pod \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.039911 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-config\") pod \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.039957 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-nb\") pod \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\" (UID: \"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.049659 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-kube-api-access-5bswr" (OuterVolumeSpecName: "kube-api-access-5bswr") pod "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" (UID: "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3"). InnerVolumeSpecName "kube-api-access-5bswr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.058826 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.059171 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-log" containerID="cri-o://e2979e6bbbac68da19e0c28b79ca66939763bca7ef9cedf774aaaed85444769d" gracePeriod=30 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.059358 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-metadata" containerID="cri-o://a5169e86500186b3f15dbf894a2ceac8b0467fe904bb600804be8f15fde76d9f" gracePeriod=30 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.081842 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.082069 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="969424ae-7de9-4f26-b3c5-c7e50563e32a" containerName="nova-cell0-conductor-conductor" containerID="cri-o://0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" gracePeriod=30 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.132398 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" (UID: "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.132622 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-config" (OuterVolumeSpecName: "config") pod "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" (UID: "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.142672 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bswr\" (UniqueName: \"kubernetes.io/projected/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-kube-api-access-5bswr\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.142709 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.142720 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.155379 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" (UID: "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.164112 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" (UID: "db439f3b-1a4e-4dbc-977d-07e95b8b9ba3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.244169 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.244205 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.275813 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.609448 4863 generic.go:334] "Generic (PLEG): container finished" podID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerID="e2979e6bbbac68da19e0c28b79ca66939763bca7ef9cedf774aaaed85444769d" exitCode=143 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.610011 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d82fa16f-21cf-4b96-a99c-4e4d194d27cb","Type":"ContainerDied","Data":"e2979e6bbbac68da19e0c28b79ca66939763bca7ef9cedf774aaaed85444769d"} Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.614673 4863 generic.go:334] "Generic (PLEG): container finished" podID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerID="033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf" exitCode=143 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.614752 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c49c5191-38ab-4a6f-93c6-e9e477266d51","Type":"ContainerDied","Data":"033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf"} Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.625765 4863 generic.go:334] "Generic (PLEG): container finished" podID="a3ecd8f0-9470-4b55-8657-2a1aba539e56" containerID="5772bcb2a3e0c7889d635921717b3d759f58af26ee5d45aee8d159462ca3cc5e" exitCode=0 Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.625834 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a3ecd8f0-9470-4b55-8657-2a1aba539e56","Type":"ContainerDied","Data":"5772bcb2a3e0c7889d635921717b3d759f58af26ee5d45aee8d159462ca3cc5e"} Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.634846 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" event={"ID":"db439f3b-1a4e-4dbc-977d-07e95b8b9ba3","Type":"ContainerDied","Data":"4e2107b5fde15e7a1d1434eb4a3068e7089adf42149aca2e581ae177504a1339"} Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.634908 4863 scope.go:117] "RemoveContainer" containerID="c8cb47a9db48fce7e49e670296ce284984c789ff835aae3283a4804e49dff802" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.635097 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.668585 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4"] Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.674584 4863 scope.go:117] "RemoveContainer" containerID="82d1b2c965f21db2c2d9d36b828bb5b20b7d5b0e2a67d978e1cd2a1f76e79eb7" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.678424 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f7d6fbf6f-jvmj4"] Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.730077 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.862570 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dfv8\" (UniqueName: \"kubernetes.io/projected/a3ecd8f0-9470-4b55-8657-2a1aba539e56-kube-api-access-4dfv8\") pod \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.862688 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-config-data\") pod \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.862713 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-combined-ca-bundle\") pod \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\" (UID: \"a3ecd8f0-9470-4b55-8657-2a1aba539e56\") " Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.868726 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3ecd8f0-9470-4b55-8657-2a1aba539e56-kube-api-access-4dfv8" (OuterVolumeSpecName: "kube-api-access-4dfv8") pod "a3ecd8f0-9470-4b55-8657-2a1aba539e56" (UID: "a3ecd8f0-9470-4b55-8657-2a1aba539e56"). InnerVolumeSpecName "kube-api-access-4dfv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.896399 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3ecd8f0-9470-4b55-8657-2a1aba539e56" (UID: "a3ecd8f0-9470-4b55-8657-2a1aba539e56"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.915236 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-config-data" (OuterVolumeSpecName: "config-data") pod "a3ecd8f0-9470-4b55-8657-2a1aba539e56" (UID: "a3ecd8f0-9470-4b55-8657-2a1aba539e56"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.964481 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dfv8\" (UniqueName: \"kubernetes.io/projected/a3ecd8f0-9470-4b55-8657-2a1aba539e56-kube-api-access-4dfv8\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.964511 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:37 crc kubenswrapper[4863]: I1205 08:15:37.964522 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3ecd8f0-9470-4b55-8657-2a1aba539e56-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:38 crc kubenswrapper[4863]: E1205 08:15:38.463699 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:38 crc kubenswrapper[4863]: E1205 08:15:38.467287 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:38 crc kubenswrapper[4863]: E1205 08:15:38.468904 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:38 crc kubenswrapper[4863]: E1205 08:15:38.468990 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="969424ae-7de9-4f26-b3c5-c7e50563e32a" containerName="nova-cell0-conductor-conductor" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.613124 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" path="/var/lib/kubelet/pods/db439f3b-1a4e-4dbc-977d-07e95b8b9ba3/volumes" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.646500 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a3ecd8f0-9470-4b55-8657-2a1aba539e56","Type":"ContainerDied","Data":"a41c2f3e510e1c4d2bd1e64ec0ad4b4e543c107a07ff6223e2078316b4a4909b"} Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.646560 4863 scope.go:117] "RemoveContainer" containerID="5772bcb2a3e0c7889d635921717b3d759f58af26ee5d45aee8d159462ca3cc5e" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.646723 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.673988 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.686418 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.697673 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:15:38 crc kubenswrapper[4863]: E1205 08:15:38.698134 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerName="dnsmasq-dns" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.698155 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerName="dnsmasq-dns" Dec 05 08:15:38 crc kubenswrapper[4863]: E1205 08:15:38.698202 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ecd8f0-9470-4b55-8657-2a1aba539e56" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.698213 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3ecd8f0-9470-4b55-8657-2a1aba539e56" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 08:15:38 crc kubenswrapper[4863]: E1205 08:15:38.698226 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerName="init" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.698234 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerName="init" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.698465 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3ecd8f0-9470-4b55-8657-2a1aba539e56" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.698524 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="db439f3b-1a4e-4dbc-977d-07e95b8b9ba3" containerName="dnsmasq-dns" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.699275 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.706343 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.706773 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.882166 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269bb40a-af4e-435f-93b8-6a8db7891dfa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.882320 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4dcw\" (UniqueName: \"kubernetes.io/projected/269bb40a-af4e-435f-93b8-6a8db7891dfa-kube-api-access-r4dcw\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.882402 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269bb40a-af4e-435f-93b8-6a8db7891dfa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.983372 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269bb40a-af4e-435f-93b8-6a8db7891dfa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.983527 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4dcw\" (UniqueName: \"kubernetes.io/projected/269bb40a-af4e-435f-93b8-6a8db7891dfa-kube-api-access-r4dcw\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.983604 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269bb40a-af4e-435f-93b8-6a8db7891dfa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.987624 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269bb40a-af4e-435f-93b8-6a8db7891dfa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:38 crc kubenswrapper[4863]: I1205 08:15:38.988112 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269bb40a-af4e-435f-93b8-6a8db7891dfa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.003811 
4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4dcw\" (UniqueName: \"kubernetes.io/projected/269bb40a-af4e-435f-93b8-6a8db7891dfa-kube-api-access-r4dcw\") pod \"nova-cell1-novncproxy-0\" (UID: \"269bb40a-af4e-435f-93b8-6a8db7891dfa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.060160 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.519299 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.571711 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:15:39 crc kubenswrapper[4863]: W1205 08:15:39.576071 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod269bb40a_af4e_435f_93b8_6a8db7891dfa.slice/crio-88912ad4581f6aada279b914d4617544094cdca9dd98cdc45da9ef8ed8942937 WatchSource:0}: Error finding container 88912ad4581f6aada279b914d4617544094cdca9dd98cdc45da9ef8ed8942937: Status 404 returned error can't find the container with id 88912ad4581f6aada279b914d4617544094cdca9dd98cdc45da9ef8ed8942937 Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.595954 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kh2sk\" (UniqueName: \"kubernetes.io/projected/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-kube-api-access-kh2sk\") pod \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.596010 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-config-data\") pod \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.596189 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-combined-ca-bundle\") pod \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\" (UID: \"4b7f6de9-2f45-4b71-a83a-22c96efddbbe\") " Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.599656 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-kube-api-access-kh2sk" (OuterVolumeSpecName: "kube-api-access-kh2sk") pod "4b7f6de9-2f45-4b71-a83a-22c96efddbbe" (UID: "4b7f6de9-2f45-4b71-a83a-22c96efddbbe"). InnerVolumeSpecName "kube-api-access-kh2sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.620815 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-config-data" (OuterVolumeSpecName: "config-data") pod "4b7f6de9-2f45-4b71-a83a-22c96efddbbe" (UID: "4b7f6de9-2f45-4b71-a83a-22c96efddbbe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.629291 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b7f6de9-2f45-4b71-a83a-22c96efddbbe" (UID: "4b7f6de9-2f45-4b71-a83a-22c96efddbbe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.659724 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"269bb40a-af4e-435f-93b8-6a8db7891dfa","Type":"ContainerStarted","Data":"88912ad4581f6aada279b914d4617544094cdca9dd98cdc45da9ef8ed8942937"} Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.662574 4863 generic.go:334] "Generic (PLEG): container finished" podID="4b7f6de9-2f45-4b71-a83a-22c96efddbbe" containerID="58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8" exitCode=0 Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.662620 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.662632 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7f6de9-2f45-4b71-a83a-22c96efddbbe","Type":"ContainerDied","Data":"58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8"} Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.662651 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7f6de9-2f45-4b71-a83a-22c96efddbbe","Type":"ContainerDied","Data":"6a71737c8e96d55f8e12c2b86bbf184e6e7bd2ab469a19083e808fb84beed741"} Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.662667 4863 scope.go:117] "RemoveContainer" containerID="58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.698830 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kh2sk\" (UniqueName: \"kubernetes.io/projected/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-kube-api-access-kh2sk\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.698863 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.698875 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7f6de9-2f45-4b71-a83a-22c96efddbbe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.706499 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.715667 4863 scope.go:117] "RemoveContainer" containerID="58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8" Dec 05 08:15:39 crc kubenswrapper[4863]: E1205 08:15:39.716241 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8\": container with ID starting with 58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8 not found: ID does not exist" 
containerID="58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.716286 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8"} err="failed to get container status \"58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8\": rpc error: code = NotFound desc = could not find container \"58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8\": container with ID starting with 58fa87ef92d80bc098d39941bb30552c8e70f7219a3f504f8bb1d1efafd00bc8 not found: ID does not exist" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.724397 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.740556 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:15:39 crc kubenswrapper[4863]: E1205 08:15:39.741150 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b7f6de9-2f45-4b71-a83a-22c96efddbbe" containerName="nova-scheduler-scheduler" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.741182 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b7f6de9-2f45-4b71-a83a-22c96efddbbe" containerName="nova-scheduler-scheduler" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.741586 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b7f6de9-2f45-4b71-a83a-22c96efddbbe" containerName="nova-scheduler-scheduler" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.742529 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.745442 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.751164 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.800174 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pcrj\" (UniqueName: \"kubernetes.io/projected/5d153922-7ce3-4480-82d8-c02ae2163538-kube-api-access-6pcrj\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.800236 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-config-data\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.800315 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.901459 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pcrj\" (UniqueName: \"kubernetes.io/projected/5d153922-7ce3-4480-82d8-c02ae2163538-kube-api-access-6pcrj\") pod \"nova-scheduler-0\" 
(UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.901570 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-config-data\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.901611 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.905603 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.914081 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-config-data\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:39 crc kubenswrapper[4863]: I1205 08:15:39.916286 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pcrj\" (UniqueName: \"kubernetes.io/projected/5d153922-7ce3-4480-82d8-c02ae2163538-kube-api-access-6pcrj\") pod \"nova-scheduler-0\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " pod="openstack/nova-scheduler-0" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.063708 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.260936 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.261623 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="a542d65a-3183-4576-9cac-d49b7610ecf3" containerName="nova-cell1-conductor-conductor" containerID="cri-o://b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d" gracePeriod=30 Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.505101 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:15:40 crc kubenswrapper[4863]: W1205 08:15:40.513457 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d153922_7ce3_4480_82d8_c02ae2163538.slice/crio-7f4650d8f5aec9760acbb0718197d5430b80fed6c154d94c20061de57ec3fc64 WatchSource:0}: Error finding container 7f4650d8f5aec9760acbb0718197d5430b80fed6c154d94c20061de57ec3fc64: Status 404 returned error can't find the container with id 7f4650d8f5aec9760acbb0718197d5430b80fed6c154d94c20061de57ec3fc64 Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.617234 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b7f6de9-2f45-4b71-a83a-22c96efddbbe" path="/var/lib/kubelet/pods/4b7f6de9-2f45-4b71-a83a-22c96efddbbe/volumes" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.618215 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3ecd8f0-9470-4b55-8657-2a1aba539e56" path="/var/lib/kubelet/pods/a3ecd8f0-9470-4b55-8657-2a1aba539e56/volumes" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.619782 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.716275 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"269bb40a-af4e-435f-93b8-6a8db7891dfa","Type":"ContainerStarted","Data":"d5346fe195c1d75c2d24fba063bfdbaffa21dae8d03a227144af70130d4d6bdf"} Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.717356 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp2p4\" (UniqueName: \"kubernetes.io/projected/c49c5191-38ab-4a6f-93c6-e9e477266d51-kube-api-access-hp2p4\") pod \"c49c5191-38ab-4a6f-93c6-e9e477266d51\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.717574 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-combined-ca-bundle\") pod \"c49c5191-38ab-4a6f-93c6-e9e477266d51\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.717734 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-config-data\") pod \"c49c5191-38ab-4a6f-93c6-e9e477266d51\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.717849 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c49c5191-38ab-4a6f-93c6-e9e477266d51-logs\") pod \"c49c5191-38ab-4a6f-93c6-e9e477266d51\" (UID: \"c49c5191-38ab-4a6f-93c6-e9e477266d51\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.718856 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c49c5191-38ab-4a6f-93c6-e9e477266d51-logs" (OuterVolumeSpecName: "logs") pod "c49c5191-38ab-4a6f-93c6-e9e477266d51" (UID: "c49c5191-38ab-4a6f-93c6-e9e477266d51"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.719569 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c49c5191-38ab-4a6f-93c6-e9e477266d51-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.720160 4863 generic.go:334] "Generic (PLEG): container finished" podID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerID="a5169e86500186b3f15dbf894a2ceac8b0467fe904bb600804be8f15fde76d9f" exitCode=0 Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.720236 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d82fa16f-21cf-4b96-a99c-4e4d194d27cb","Type":"ContainerDied","Data":"a5169e86500186b3f15dbf894a2ceac8b0467fe904bb600804be8f15fde76d9f"} Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.730145 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5d153922-7ce3-4480-82d8-c02ae2163538","Type":"ContainerStarted","Data":"7f4650d8f5aec9760acbb0718197d5430b80fed6c154d94c20061de57ec3fc64"} Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.743260 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c49c5191-38ab-4a6f-93c6-e9e477266d51-kube-api-access-hp2p4" (OuterVolumeSpecName: "kube-api-access-hp2p4") pod "c49c5191-38ab-4a6f-93c6-e9e477266d51" (UID: "c49c5191-38ab-4a6f-93c6-e9e477266d51"). InnerVolumeSpecName "kube-api-access-hp2p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.752142 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.752118801 podStartE2EDuration="2.752118801s" podCreationTimestamp="2025-12-05 08:15:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:40.736315957 +0000 UTC m=+5368.462312997" watchObservedRunningTime="2025-12-05 08:15:40.752118801 +0000 UTC m=+5368.478115841" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.752310 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c49c5191-38ab-4a6f-93c6-e9e477266d51","Type":"ContainerDied","Data":"681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b"} Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.752314 4863 generic.go:334] "Generic (PLEG): container finished" podID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerID="681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b" exitCode=0 Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.752332 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c49c5191-38ab-4a6f-93c6-e9e477266d51","Type":"ContainerDied","Data":"092afbc32f8a3a3e69b8ae911661a8a37a4e3d4bbb6cff0064b7814269947bd7"} Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.752347 4863 scope.go:117] "RemoveContainer" containerID="681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.752428 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.823912 4863 scope.go:117] "RemoveContainer" containerID="033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.824772 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp2p4\" (UniqueName: \"kubernetes.io/projected/c49c5191-38ab-4a6f-93c6-e9e477266d51-kube-api-access-hp2p4\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.845040 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.854563 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-config-data" (OuterVolumeSpecName: "config-data") pod "c49c5191-38ab-4a6f-93c6-e9e477266d51" (UID: "c49c5191-38ab-4a6f-93c6-e9e477266d51"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.856701 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c49c5191-38ab-4a6f-93c6-e9e477266d51" (UID: "c49c5191-38ab-4a6f-93c6-e9e477266d51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.896741 4863 scope.go:117] "RemoveContainer" containerID="681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b" Dec 05 08:15:40 crc kubenswrapper[4863]: E1205 08:15:40.900571 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b\": container with ID starting with 681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b not found: ID does not exist" containerID="681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.900610 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b"} err="failed to get container status \"681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b\": rpc error: code = NotFound desc = could not find container \"681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b\": container with ID starting with 681fdf685a36e66b6b499e0e9e28b74c5996f38806cc9bac1645ea92a9e4ef8b not found: ID does not exist" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.900634 4863 scope.go:117] "RemoveContainer" containerID="033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf" Dec 05 08:15:40 crc kubenswrapper[4863]: E1205 08:15:40.904698 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf\": container with ID starting with 033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf not found: ID does not exist" containerID="033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.904877 4863 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf"} err="failed to get container status \"033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf\": rpc error: code = NotFound desc = could not find container \"033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf\": container with ID starting with 033137fd9bdcad6299ab4d9b9692e13c7fa15998f4c5500dfa05eadf7af2d9cf not found: ID does not exist" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.926225 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-logs\") pod \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.926579 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-combined-ca-bundle\") pod \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.926744 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-config-data\") pod \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.926888 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhprm\" (UniqueName: \"kubernetes.io/projected/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-kube-api-access-nhprm\") pod \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\" (UID: \"d82fa16f-21cf-4b96-a99c-4e4d194d27cb\") " Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.927818 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.927905 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c49c5191-38ab-4a6f-93c6-e9e477266d51-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.928057 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-logs" (OuterVolumeSpecName: "logs") pod "d82fa16f-21cf-4b96-a99c-4e4d194d27cb" (UID: "d82fa16f-21cf-4b96-a99c-4e4d194d27cb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.931397 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-kube-api-access-nhprm" (OuterVolumeSpecName: "kube-api-access-nhprm") pod "d82fa16f-21cf-4b96-a99c-4e4d194d27cb" (UID: "d82fa16f-21cf-4b96-a99c-4e4d194d27cb"). InnerVolumeSpecName "kube-api-access-nhprm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.963658 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d82fa16f-21cf-4b96-a99c-4e4d194d27cb" (UID: "d82fa16f-21cf-4b96-a99c-4e4d194d27cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:40 crc kubenswrapper[4863]: I1205 08:15:40.975581 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-config-data" (OuterVolumeSpecName: "config-data") pod "d82fa16f-21cf-4b96-a99c-4e4d194d27cb" (UID: "d82fa16f-21cf-4b96-a99c-4e4d194d27cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.034825 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhprm\" (UniqueName: \"kubernetes.io/projected/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-kube-api-access-nhprm\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.034868 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.034884 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.034895 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d82fa16f-21cf-4b96-a99c-4e4d194d27cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.091539 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.109737 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.122526 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.122894 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-log" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.122917 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-log" Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.122941 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-log" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.122947 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-log" Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.122974 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-api" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.122980 4863 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-api" Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.122989 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-metadata" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.122995 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-metadata" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.123150 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-metadata" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.123170 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-api" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.123180 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" containerName="nova-api-log" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.123189 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-log" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.124077 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.126343 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.152805 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.237506 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsp22\" (UniqueName: \"kubernetes.io/projected/c66091e0-5ab3-4217-9855-8b8be3129d9e-kube-api-access-gsp22\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.237565 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-config-data\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.237598 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66091e0-5ab3-4217-9855-8b8be3129d9e-logs\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.237627 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.339005 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsp22\" (UniqueName: \"kubernetes.io/projected/c66091e0-5ab3-4217-9855-8b8be3129d9e-kube-api-access-gsp22\") pod \"nova-api-0\" (UID: 
\"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.339063 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-config-data\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.339102 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66091e0-5ab3-4217-9855-8b8be3129d9e-logs\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.339139 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.339725 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66091e0-5ab3-4217-9855-8b8be3129d9e-logs\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.344141 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.344249 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-config-data\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.360360 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsp22\" (UniqueName: \"kubernetes.io/projected/c66091e0-5ab3-4217-9855-8b8be3129d9e-kube-api-access-gsp22\") pod \"nova-api-0\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.444657 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.728766 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.730692 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.732028 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:41 crc kubenswrapper[4863]: E1205 08:15:41.732065 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="a542d65a-3183-4576-9cac-d49b7610ecf3" containerName="nova-cell1-conductor-conductor" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.770140 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d82fa16f-21cf-4b96-a99c-4e4d194d27cb","Type":"ContainerDied","Data":"df3bbac69ef1384c966daac06c749e5541b621df58854e08818a56e4b545ac57"} Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.770191 4863 scope.go:117] "RemoveContainer" containerID="a5169e86500186b3f15dbf894a2ceac8b0467fe904bb600804be8f15fde76d9f" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.770198 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.773593 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5d153922-7ce3-4480-82d8-c02ae2163538","Type":"ContainerStarted","Data":"5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c"} Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.792762 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.79274172 podStartE2EDuration="2.79274172s" podCreationTimestamp="2025-12-05 08:15:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:41.791133862 +0000 UTC m=+5369.517130912" watchObservedRunningTime="2025-12-05 08:15:41.79274172 +0000 UTC m=+5369.518738760" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.793721 4863 scope.go:117] "RemoveContainer" containerID="e2979e6bbbac68da19e0c28b79ca66939763bca7ef9cedf774aaaed85444769d" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.810504 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.827571 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.837989 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.839604 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.842574 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.856017 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:15:41 crc kubenswrapper[4863]: W1205 08:15:41.939920 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc66091e0_5ab3_4217_9855_8b8be3129d9e.slice/crio-ba6be816d67c2fb28d5f15fe7ce49a6cab78d773fa1c3f4c381f32eb198edc8c WatchSource:0}: Error finding container ba6be816d67c2fb28d5f15fe7ce49a6cab78d773fa1c3f4c381f32eb198edc8c: Status 404 returned error can't find the container with id ba6be816d67c2fb28d5f15fe7ce49a6cab78d773fa1c3f4c381f32eb198edc8c Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.948393 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-config-data\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.948491 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.948515 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/4762ada2-37ce-414a-91a6-c7f2ebeef46c-logs\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.948631 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wrxj\" (UniqueName: \"kubernetes.io/projected/4762ada2-37ce-414a-91a6-c7f2ebeef46c-kube-api-access-7wrxj\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:41 crc kubenswrapper[4863]: I1205 08:15:41.951166 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.050594 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wrxj\" (UniqueName: \"kubernetes.io/projected/4762ada2-37ce-414a-91a6-c7f2ebeef46c-kube-api-access-7wrxj\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.050679 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-config-data\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.050727 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.050763 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4762ada2-37ce-414a-91a6-c7f2ebeef46c-logs\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.051161 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4762ada2-37ce-414a-91a6-c7f2ebeef46c-logs\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.057609 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-config-data\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.057994 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.073590 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wrxj\" (UniqueName: \"kubernetes.io/projected/4762ada2-37ce-414a-91a6-c7f2ebeef46c-kube-api-access-7wrxj\") pod \"nova-metadata-0\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " 
pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.159689 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.620757 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c49c5191-38ab-4a6f-93c6-e9e477266d51" path="/var/lib/kubelet/pods/c49c5191-38ab-4a6f-93c6-e9e477266d51/volumes" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.622914 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" path="/var/lib/kubelet/pods/d82fa16f-21cf-4b96-a99c-4e4d194d27cb/volumes" Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.643967 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.794179 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4762ada2-37ce-414a-91a6-c7f2ebeef46c","Type":"ContainerStarted","Data":"5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597"} Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.794233 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4762ada2-37ce-414a-91a6-c7f2ebeef46c","Type":"ContainerStarted","Data":"d23f27a46fb769341db852e83359d389d61867e7bd71c4838cdcc9f2fcd7dab7"} Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.799923 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c66091e0-5ab3-4217-9855-8b8be3129d9e","Type":"ContainerStarted","Data":"4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6"} Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.799956 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c66091e0-5ab3-4217-9855-8b8be3129d9e","Type":"ContainerStarted","Data":"4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275"} Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.799969 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c66091e0-5ab3-4217-9855-8b8be3129d9e","Type":"ContainerStarted","Data":"ba6be816d67c2fb28d5f15fe7ce49a6cab78d773fa1c3f4c381f32eb198edc8c"} Dec 05 08:15:42 crc kubenswrapper[4863]: I1205 08:15:42.824284 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.82426935 podStartE2EDuration="1.82426935s" podCreationTimestamp="2025-12-05 08:15:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:42.82137495 +0000 UTC m=+5370.547371990" watchObservedRunningTime="2025-12-05 08:15:42.82426935 +0000 UTC m=+5370.550266390" Dec 05 08:15:43 crc kubenswrapper[4863]: E1205 08:15:43.465982 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:43 crc kubenswrapper[4863]: E1205 08:15:43.468268 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:43 crc kubenswrapper[4863]: E1205 08:15:43.470548 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 08:15:43 crc kubenswrapper[4863]: E1205 08:15:43.470680 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="969424ae-7de9-4f26-b3c5-c7e50563e32a" containerName="nova-cell0-conductor-conductor" Dec 05 08:15:43 crc kubenswrapper[4863]: I1205 08:15:43.817015 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4762ada2-37ce-414a-91a6-c7f2ebeef46c","Type":"ContainerStarted","Data":"3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c"} Dec 05 08:15:43 crc kubenswrapper[4863]: I1205 08:15:43.855172 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.855117633 podStartE2EDuration="2.855117633s" podCreationTimestamp="2025-12-05 08:15:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:43.847134639 +0000 UTC m=+5371.573131679" watchObservedRunningTime="2025-12-05 08:15:43.855117633 +0000 UTC m=+5371.581114673" Dec 05 08:15:44 crc kubenswrapper[4863]: I1205 08:15:44.060810 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:45 crc kubenswrapper[4863]: I1205 08:15:45.064411 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 08:15:45 crc kubenswrapper[4863]: I1205 08:15:45.551849 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:15:45 crc kubenswrapper[4863]: I1205 08:15:45.552300 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d82fa16f-21cf-4b96-a99c-4e4d194d27cb" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": dial tcp 10.217.1.66:8775: i/o timeout (Client.Timeout exceeded while awaiting headers)" Dec 05 08:15:45 crc kubenswrapper[4863]: I1205 08:15:45.835086 4863 generic.go:334] "Generic (PLEG): container finished" podID="a542d65a-3183-4576-9cac-d49b7610ecf3" containerID="b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d" exitCode=0 Dec 05 08:15:45 crc kubenswrapper[4863]: I1205 08:15:45.835346 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a542d65a-3183-4576-9cac-d49b7610ecf3","Type":"ContainerDied","Data":"b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d"} Dec 05 08:15:45 crc kubenswrapper[4863]: I1205 08:15:45.946026 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.020531 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-config-data\") pod \"a542d65a-3183-4576-9cac-d49b7610ecf3\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.020673 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-combined-ca-bundle\") pod \"a542d65a-3183-4576-9cac-d49b7610ecf3\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.020735 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v82bc\" (UniqueName: \"kubernetes.io/projected/a542d65a-3183-4576-9cac-d49b7610ecf3-kube-api-access-v82bc\") pod \"a542d65a-3183-4576-9cac-d49b7610ecf3\" (UID: \"a542d65a-3183-4576-9cac-d49b7610ecf3\") " Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.028673 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a542d65a-3183-4576-9cac-d49b7610ecf3-kube-api-access-v82bc" (OuterVolumeSpecName: "kube-api-access-v82bc") pod "a542d65a-3183-4576-9cac-d49b7610ecf3" (UID: "a542d65a-3183-4576-9cac-d49b7610ecf3"). InnerVolumeSpecName "kube-api-access-v82bc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.051116 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a542d65a-3183-4576-9cac-d49b7610ecf3" (UID: "a542d65a-3183-4576-9cac-d49b7610ecf3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.058721 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-config-data" (OuterVolumeSpecName: "config-data") pod "a542d65a-3183-4576-9cac-d49b7610ecf3" (UID: "a542d65a-3183-4576-9cac-d49b7610ecf3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.124738 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v82bc\" (UniqueName: \"kubernetes.io/projected/a542d65a-3183-4576-9cac-d49b7610ecf3-kube-api-access-v82bc\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.126515 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.126541 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a542d65a-3183-4576-9cac-d49b7610ecf3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.847729 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a542d65a-3183-4576-9cac-d49b7610ecf3","Type":"ContainerDied","Data":"902ab79be26add44244ae5fa55e5485dcc44a29cc224f6244c4f706140217fa7"} Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.848153 4863 scope.go:117] "RemoveContainer" containerID="b53eba11a4fa37e57e61771968f112f4ddab0ae6e111d29b4fb220a9780b537d" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.847834 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.895542 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.907582 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.922685 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:15:46 crc kubenswrapper[4863]: E1205 08:15:46.942036 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a542d65a-3183-4576-9cac-d49b7610ecf3" containerName="nova-cell1-conductor-conductor" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.942096 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a542d65a-3183-4576-9cac-d49b7610ecf3" containerName="nova-cell1-conductor-conductor" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.944798 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a542d65a-3183-4576-9cac-d49b7610ecf3" containerName="nova-cell1-conductor-conductor" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.951544 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.962990 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 08:15:46 crc kubenswrapper[4863]: I1205 08:15:46.963339 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.051559 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.051611 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6bzl\" (UniqueName: \"kubernetes.io/projected/c8263033-2370-4ebb-9eef-74f211520a1a-kube-api-access-l6bzl\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.051634 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.154034 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.156381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6bzl\" (UniqueName: \"kubernetes.io/projected/c8263033-2370-4ebb-9eef-74f211520a1a-kube-api-access-l6bzl\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.156582 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.160211 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.160358 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.167223 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.170846 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.174702 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6bzl\" (UniqueName: \"kubernetes.io/projected/c8263033-2370-4ebb-9eef-74f211520a1a-kube-api-access-l6bzl\") pod \"nova-cell1-conductor-0\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.271303 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.773017 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.859679 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.863081 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c8263033-2370-4ebb-9eef-74f211520a1a","Type":"ContainerStarted","Data":"54d1ef8ff1737ade0aeec39a63977a5f9a57a352c6155765d295a941f8d0d37e"} Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.864618 4863 generic.go:334] "Generic (PLEG): container finished" podID="969424ae-7de9-4f26-b3c5-c7e50563e32a" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" exitCode=0 Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.864643 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"969424ae-7de9-4f26-b3c5-c7e50563e32a","Type":"ContainerDied","Data":"0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb"} Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.864660 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"969424ae-7de9-4f26-b3c5-c7e50563e32a","Type":"ContainerDied","Data":"28f1c3ca7de0d15ea7f98869d746af935e23efb602eda296c67fa5e7eeb8368d"} Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.864677 4863 scope.go:117] "RemoveContainer" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.864706 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.877336 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-combined-ca-bundle\") pod \"969424ae-7de9-4f26-b3c5-c7e50563e32a\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.877413 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-config-data\") pod \"969424ae-7de9-4f26-b3c5-c7e50563e32a\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.877544 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7bm8\" (UniqueName: \"kubernetes.io/projected/969424ae-7de9-4f26-b3c5-c7e50563e32a-kube-api-access-p7bm8\") pod \"969424ae-7de9-4f26-b3c5-c7e50563e32a\" (UID: \"969424ae-7de9-4f26-b3c5-c7e50563e32a\") " Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.883980 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/969424ae-7de9-4f26-b3c5-c7e50563e32a-kube-api-access-p7bm8" (OuterVolumeSpecName: "kube-api-access-p7bm8") pod "969424ae-7de9-4f26-b3c5-c7e50563e32a" (UID: "969424ae-7de9-4f26-b3c5-c7e50563e32a"). InnerVolumeSpecName "kube-api-access-p7bm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.905981 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-config-data" (OuterVolumeSpecName: "config-data") pod "969424ae-7de9-4f26-b3c5-c7e50563e32a" (UID: "969424ae-7de9-4f26-b3c5-c7e50563e32a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.909997 4863 scope.go:117] "RemoveContainer" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" Dec 05 08:15:47 crc kubenswrapper[4863]: E1205 08:15:47.910427 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb\": container with ID starting with 0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb not found: ID does not exist" containerID="0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.910482 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb"} err="failed to get container status \"0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb\": rpc error: code = NotFound desc = could not find container \"0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb\": container with ID starting with 0fa9886dbb18f94eb3f20aaa9644182a9f4838ec28dd9b091948f3d5041065eb not found: ID does not exist" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.928233 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "969424ae-7de9-4f26-b3c5-c7e50563e32a" (UID: "969424ae-7de9-4f26-b3c5-c7e50563e32a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.979223 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.979258 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969424ae-7de9-4f26-b3c5-c7e50563e32a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:47 crc kubenswrapper[4863]: I1205 08:15:47.979268 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7bm8\" (UniqueName: \"kubernetes.io/projected/969424ae-7de9-4f26-b3c5-c7e50563e32a-kube-api-access-p7bm8\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.243044 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.256324 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.270082 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:15:48 crc kubenswrapper[4863]: E1205 08:15:48.270762 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="969424ae-7de9-4f26-b3c5-c7e50563e32a" containerName="nova-cell0-conductor-conductor" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.270860 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="969424ae-7de9-4f26-b3c5-c7e50563e32a" containerName="nova-cell0-conductor-conductor" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.271196 4863 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="969424ae-7de9-4f26-b3c5-c7e50563e32a" containerName="nova-cell0-conductor-conductor" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.272662 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.279727 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.289859 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.388211 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.388336 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.388376 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7j69\" (UniqueName: \"kubernetes.io/projected/e3746ab9-9512-4d76-94fe-312dd6679c25-kube-api-access-w7j69\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.489627 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.489901 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7j69\" (UniqueName: \"kubernetes.io/projected/e3746ab9-9512-4d76-94fe-312dd6679c25-kube-api-access-w7j69\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.490049 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.508252 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.508435 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.514952 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7j69\" (UniqueName: \"kubernetes.io/projected/e3746ab9-9512-4d76-94fe-312dd6679c25-kube-api-access-w7j69\") pod \"nova-cell0-conductor-0\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.594898 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.620919 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="969424ae-7de9-4f26-b3c5-c7e50563e32a" path="/var/lib/kubelet/pods/969424ae-7de9-4f26-b3c5-c7e50563e32a/volumes" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.621865 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a542d65a-3183-4576-9cac-d49b7610ecf3" path="/var/lib/kubelet/pods/a542d65a-3183-4576-9cac-d49b7610ecf3/volumes" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.874750 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c8263033-2370-4ebb-9eef-74f211520a1a","Type":"ContainerStarted","Data":"d35e0e4f743899c93c75be36ac6fc6e73301a084d29f11841c08ca72ed79cdc3"} Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.874914 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:48 crc kubenswrapper[4863]: I1205 08:15:48.896515 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.896500747 podStartE2EDuration="2.896500747s" podCreationTimestamp="2025-12-05 08:15:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:48.889461577 +0000 UTC m=+5376.615458617" watchObservedRunningTime="2025-12-05 08:15:48.896500747 +0000 UTC m=+5376.622497787" Dec 05 08:15:49 crc kubenswrapper[4863]: I1205 08:15:49.055219 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:15:49 crc kubenswrapper[4863]: I1205 08:15:49.060385 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:49 crc kubenswrapper[4863]: W1205 08:15:49.061669 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3746ab9_9512_4d76_94fe_312dd6679c25.slice/crio-14ae5ebf35aa33330d54b9bf5c871dbd6dd56749dbf749e424f7b647d3c0282f WatchSource:0}: Error finding container 14ae5ebf35aa33330d54b9bf5c871dbd6dd56749dbf749e424f7b647d3c0282f: Status 404 returned error can't find the container with id 14ae5ebf35aa33330d54b9bf5c871dbd6dd56749dbf749e424f7b647d3c0282f Dec 05 08:15:49 crc kubenswrapper[4863]: I1205 08:15:49.071362 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:49 crc kubenswrapper[4863]: I1205 08:15:49.890435 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" 
event={"ID":"e3746ab9-9512-4d76-94fe-312dd6679c25","Type":"ContainerStarted","Data":"3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55"} Dec 05 08:15:49 crc kubenswrapper[4863]: I1205 08:15:49.890746 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e3746ab9-9512-4d76-94fe-312dd6679c25","Type":"ContainerStarted","Data":"14ae5ebf35aa33330d54b9bf5c871dbd6dd56749dbf749e424f7b647d3c0282f"} Dec 05 08:15:49 crc kubenswrapper[4863]: I1205 08:15:49.901553 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:15:49 crc kubenswrapper[4863]: I1205 08:15:49.913495 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.913458544 podStartE2EDuration="1.913458544s" podCreationTimestamp="2025-12-05 08:15:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:15:49.90714586 +0000 UTC m=+5377.633142900" watchObservedRunningTime="2025-12-05 08:15:49.913458544 +0000 UTC m=+5377.639455584" Dec 05 08:15:50 crc kubenswrapper[4863]: I1205 08:15:50.064760 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 08:15:50 crc kubenswrapper[4863]: I1205 08:15:50.093015 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 08:15:50 crc kubenswrapper[4863]: I1205 08:15:50.902301 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:50 crc kubenswrapper[4863]: I1205 08:15:50.938684 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 08:15:51 crc kubenswrapper[4863]: I1205 08:15:51.445141 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:15:51 crc kubenswrapper[4863]: I1205 08:15:51.445976 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:15:52 crc kubenswrapper[4863]: I1205 08:15:52.160496 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:15:52 crc kubenswrapper[4863]: I1205 08:15:52.160591 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:15:52 crc kubenswrapper[4863]: I1205 08:15:52.527782 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:15:52 crc kubenswrapper[4863]: I1205 08:15:52.528409 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:15:53 crc kubenswrapper[4863]: I1205 08:15:53.242814 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-metadata" probeResult="failure" output="Get 
\"http://10.217.1.77:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:15:53 crc kubenswrapper[4863]: I1205 08:15:53.242840 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.129342 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.133647 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.136725 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.142552 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.218545 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.218604 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-scripts\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.218670 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ff9d7d-1e6e-41e5-a780-96842332b945-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.218743 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.218802 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jjsn\" (UniqueName: \"kubernetes.io/projected/40ff9d7d-1e6e-41e5-a780-96842332b945-kube-api-access-8jjsn\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.218831 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.320715 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.320834 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.320874 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-scripts\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.320960 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ff9d7d-1e6e-41e5-a780-96842332b945-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.321034 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.321065 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jjsn\" (UniqueName: \"kubernetes.io/projected/40ff9d7d-1e6e-41e5-a780-96842332b945-kube-api-access-8jjsn\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.321158 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ff9d7d-1e6e-41e5-a780-96842332b945-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.327958 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-scripts\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.329151 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.329640 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" 
Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.336332 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.340325 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jjsn\" (UniqueName: \"kubernetes.io/projected/40ff9d7d-1e6e-41e5-a780-96842332b945-kube-api-access-8jjsn\") pod \"cinder-scheduler-0\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.452877 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.778023 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:15:54 crc kubenswrapper[4863]: I1205 08:15:54.939771 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ff9d7d-1e6e-41e5-a780-96842332b945","Type":"ContainerStarted","Data":"96767808a0d8e378b8d89192146f89e39273b795f4a31908acacfef775bacaf5"} Dec 05 08:15:55 crc kubenswrapper[4863]: I1205 08:15:55.480482 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:15:55 crc kubenswrapper[4863]: I1205 08:15:55.481030 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api-log" containerID="cri-o://1d9bcef128b395537dc26a66c0e28bafbb811c27a48c6e61f930b559a3b524db" gracePeriod=30 Dec 05 08:15:55 crc kubenswrapper[4863]: I1205 08:15:55.481412 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api" containerID="cri-o://a16d831a6025bf2f98e64be7ee84fb6e1f9b33b8e3d36ac1aae1d28bc429bc6b" gracePeriod=30 Dec 05 08:15:55 crc kubenswrapper[4863]: I1205 08:15:55.961964 4863 generic.go:334] "Generic (PLEG): container finished" podID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerID="1d9bcef128b395537dc26a66c0e28bafbb811c27a48c6e61f930b559a3b524db" exitCode=143 Dec 05 08:15:55 crc kubenswrapper[4863]: I1205 08:15:55.962054 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a2a1d8b4-82aa-4b19-bd61-38074852cbb7","Type":"ContainerDied","Data":"1d9bcef128b395537dc26a66c0e28bafbb811c27a48c6e61f930b559a3b524db"} Dec 05 08:15:55 crc kubenswrapper[4863]: I1205 08:15:55.964675 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ff9d7d-1e6e-41e5-a780-96842332b945","Type":"ContainerStarted","Data":"334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b"} Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.419721 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.421214 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.423751 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.439918 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.468592 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trnps\" (UniqueName: \"kubernetes.io/projected/e8126d52-8f00-4eac-9feb-359fb635f044-kube-api-access-trnps\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.468944 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.468969 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.468985 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469046 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469083 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e8126d52-8f00-4eac-9feb-359fb635f044-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469114 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469170 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " 
pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469210 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-sys\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469234 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469276 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469314 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469349 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-run\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469368 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469382 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.469400 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-dev\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570602 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc 
kubenswrapper[4863]: I1205 08:15:56.570693 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570753 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-sys\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570788 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570833 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570856 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570876 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-run\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570890 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570952 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-sys\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571001 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571031 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: 
\"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571110 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.570900 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571181 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571220 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-dev\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571235 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571274 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-run\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571279 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trnps\" (UniqueName: \"kubernetes.io/projected/e8126d52-8f00-4eac-9feb-359fb635f044-kube-api-access-trnps\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571372 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571395 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571418 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571554 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.571587 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e8126d52-8f00-4eac-9feb-359fb635f044-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.572010 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.572050 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-dev\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.573142 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e8126d52-8f00-4eac-9feb-359fb635f044-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.578121 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.578152 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e8126d52-8f00-4eac-9feb-359fb635f044-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.578569 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.582258 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 
08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.583072 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8126d52-8f00-4eac-9feb-359fb635f044-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.591071 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trnps\" (UniqueName: \"kubernetes.io/projected/e8126d52-8f00-4eac-9feb-359fb635f044-kube-api-access-trnps\") pod \"cinder-volume-volume1-0\" (UID: \"e8126d52-8f00-4eac-9feb-359fb635f044\") " pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.739943 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 08:15:56 crc kubenswrapper[4863]: I1205 08:15:56.983927 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ff9d7d-1e6e-41e5-a780-96842332b945","Type":"ContainerStarted","Data":"10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122"} Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.005784 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.71716411 podStartE2EDuration="3.005763553s" podCreationTimestamp="2025-12-05 08:15:54 +0000 UTC" firstStartedPulling="2025-12-05 08:15:54.783354068 +0000 UTC m=+5382.509351118" lastFinishedPulling="2025-12-05 08:15:55.071953521 +0000 UTC m=+5382.797950561" observedRunningTime="2025-12-05 08:15:57.001951571 +0000 UTC m=+5384.727948611" watchObservedRunningTime="2025-12-05 08:15:57.005763553 +0000 UTC m=+5384.731760593" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.207381 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.208788 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.211088 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.234238 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.285935 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-scripts\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.285981 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286002 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-sys\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286022 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286070 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-run\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286090 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286139 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-config-data-custom\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286159 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286183 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286201 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286225 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-dev\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286239 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-config-data\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286252 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-nvme\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286272 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db7hn\" (UniqueName: \"kubernetes.io/projected/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-kube-api-access-db7hn\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286291 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-ceph\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.286306 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-lib-modules\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.314694 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 08:15:57 crc kubenswrapper[4863]: W1205 08:15:57.344295 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8126d52_8f00_4eac_9feb_359fb635f044.slice/crio-2741f470ad82b7615a6ae7f1de8f9b6075de683293d0a391fe65632266fb384b WatchSource:0}: Error finding container 2741f470ad82b7615a6ae7f1de8f9b6075de683293d0a391fe65632266fb384b: Status 404 returned error can't find the container with id 2741f470ad82b7615a6ae7f1de8f9b6075de683293d0a391fe65632266fb384b Dec 05 
08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.358670 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.387666 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.387906 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.387933 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-dev\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.387977 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-dev\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.387997 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-config-data\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388024 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-nvme\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388080 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db7hn\" (UniqueName: \"kubernetes.io/projected/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-kube-api-access-db7hn\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388118 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-ceph\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388146 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-lib-modules\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388261 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-scripts\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388290 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388328 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-sys\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388356 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388446 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-run\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388501 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388596 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-sys\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388651 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388667 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-config-data-custom\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388848 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388955 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-lib-modules\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388794 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-run\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388717 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-nvme\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.388763 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.393279 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.393388 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.389112 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.397440 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.399551 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-config-data-custom\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.400151 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-config-data\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.404637 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db7hn\" (UniqueName: 
\"kubernetes.io/projected/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-kube-api-access-db7hn\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.412586 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-ceph\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.412809 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1b0c15e-d09a-46fa-9a07-15665a5ae23e-scripts\") pod \"cinder-backup-0\" (UID: \"c1b0c15e-d09a-46fa-9a07-15665a5ae23e\") " pod="openstack/cinder-backup-0" Dec 05 08:15:57 crc kubenswrapper[4863]: I1205 08:15:57.526867 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 08:15:58 crc kubenswrapper[4863]: I1205 08:15:58.013558 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"e8126d52-8f00-4eac-9feb-359fb635f044","Type":"ContainerStarted","Data":"86f79c775da16e2257c6925c1274a510d601837ee6317940dbc01bcbbfcc3579"} Dec 05 08:15:58 crc kubenswrapper[4863]: I1205 08:15:58.013894 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"e8126d52-8f00-4eac-9feb-359fb635f044","Type":"ContainerStarted","Data":"2741f470ad82b7615a6ae7f1de8f9b6075de683293d0a391fe65632266fb384b"} Dec 05 08:15:58 crc kubenswrapper[4863]: I1205 08:15:58.227319 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 08:15:58 crc kubenswrapper[4863]: W1205 08:15:58.227421 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1b0c15e_d09a_46fa_9a07_15665a5ae23e.slice/crio-d32aacf934dbc8e27af3e8305ed7cef4be07a631787eb3c43c5afcc5eb3de906 WatchSource:0}: Error finding container d32aacf934dbc8e27af3e8305ed7cef4be07a631787eb3c43c5afcc5eb3de906: Status 404 returned error can't find the container with id d32aacf934dbc8e27af3e8305ed7cef4be07a631787eb3c43c5afcc5eb3de906 Dec 05 08:15:58 crc kubenswrapper[4863]: I1205 08:15:58.631729 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.041693 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"e8126d52-8f00-4eac-9feb-359fb635f044","Type":"ContainerStarted","Data":"ca20e8d272700fe14f7c515442e860bbb9d030265fc2ceb2ec2020e74846b29c"} Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.046739 4863 generic.go:334] "Generic (PLEG): container finished" podID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerID="a16d831a6025bf2f98e64be7ee84fb6e1f9b33b8e3d36ac1aae1d28bc429bc6b" exitCode=0 Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.046813 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a2a1d8b4-82aa-4b19-bd61-38074852cbb7","Type":"ContainerDied","Data":"a16d831a6025bf2f98e64be7ee84fb6e1f9b33b8e3d36ac1aae1d28bc429bc6b"} Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.049781 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" 
event={"ID":"c1b0c15e-d09a-46fa-9a07-15665a5ae23e","Type":"ContainerStarted","Data":"9b12d5d6483ffa63379346ebfcad1c717ada65ccba078a769e57f1c5113a97d8"} Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.049824 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"c1b0c15e-d09a-46fa-9a07-15665a5ae23e","Type":"ContainerStarted","Data":"e686243620279c5779ef506c63eec7a65bd8f45f83a80fd88533ef68f6f31a88"} Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.049835 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"c1b0c15e-d09a-46fa-9a07-15665a5ae23e","Type":"ContainerStarted","Data":"d32aacf934dbc8e27af3e8305ed7cef4be07a631787eb3c43c5afcc5eb3de906"} Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.078921 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.757432837 podStartE2EDuration="3.078895787s" podCreationTimestamp="2025-12-05 08:15:56 +0000 UTC" firstStartedPulling="2025-12-05 08:15:57.346251025 +0000 UTC m=+5385.072248065" lastFinishedPulling="2025-12-05 08:15:57.667713975 +0000 UTC m=+5385.393711015" observedRunningTime="2025-12-05 08:15:59.063251807 +0000 UTC m=+5386.789248847" watchObservedRunningTime="2025-12-05 08:15:59.078895787 +0000 UTC m=+5386.804892827" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.101000 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=1.774664265 podStartE2EDuration="2.100967662s" podCreationTimestamp="2025-12-05 08:15:57 +0000 UTC" firstStartedPulling="2025-12-05 08:15:58.230207744 +0000 UTC m=+5385.956204794" lastFinishedPulling="2025-12-05 08:15:58.556511151 +0000 UTC m=+5386.282508191" observedRunningTime="2025-12-05 08:15:59.087612968 +0000 UTC m=+5386.813610038" watchObservedRunningTime="2025-12-05 08:15:59.100967662 +0000 UTC m=+5386.826964702" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.352799 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.432733 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-logs\") pod \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.432799 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d9nn\" (UniqueName: \"kubernetes.io/projected/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-kube-api-access-4d9nn\") pod \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.432840 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-combined-ca-bundle\") pod \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.432899 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-etc-machine-id\") pod \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.432947 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-scripts\") pod \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.433036 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data-custom\") pod \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.433069 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data\") pod \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\" (UID: \"a2a1d8b4-82aa-4b19-bd61-38074852cbb7\") " Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.433343 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-logs" (OuterVolumeSpecName: "logs") pod "a2a1d8b4-82aa-4b19-bd61-38074852cbb7" (UID: "a2a1d8b4-82aa-4b19-bd61-38074852cbb7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.433788 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.433820 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a2a1d8b4-82aa-4b19-bd61-38074852cbb7" (UID: "a2a1d8b4-82aa-4b19-bd61-38074852cbb7"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.438850 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-scripts" (OuterVolumeSpecName: "scripts") pod "a2a1d8b4-82aa-4b19-bd61-38074852cbb7" (UID: "a2a1d8b4-82aa-4b19-bd61-38074852cbb7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.440849 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a2a1d8b4-82aa-4b19-bd61-38074852cbb7" (UID: "a2a1d8b4-82aa-4b19-bd61-38074852cbb7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.448652 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-kube-api-access-4d9nn" (OuterVolumeSpecName: "kube-api-access-4d9nn") pod "a2a1d8b4-82aa-4b19-bd61-38074852cbb7" (UID: "a2a1d8b4-82aa-4b19-bd61-38074852cbb7"). InnerVolumeSpecName "kube-api-access-4d9nn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.454597 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.485999 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2a1d8b4-82aa-4b19-bd61-38074852cbb7" (UID: "a2a1d8b4-82aa-4b19-bd61-38074852cbb7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.491968 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data" (OuterVolumeSpecName: "config-data") pod "a2a1d8b4-82aa-4b19-bd61-38074852cbb7" (UID: "a2a1d8b4-82aa-4b19-bd61-38074852cbb7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.536097 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d9nn\" (UniqueName: \"kubernetes.io/projected/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-kube-api-access-4d9nn\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.536135 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.536149 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.536160 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.536172 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 08:15:59 crc kubenswrapper[4863]: I1205 08:15:59.536185 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a1d8b4-82aa-4b19-bd61-38074852cbb7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.063047 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a2a1d8b4-82aa-4b19-bd61-38074852cbb7","Type":"ContainerDied","Data":"d8f84f6095e530d7e27cd092161ac0bb25f5eeee78d7fb10021ac1b191b3c24b"} Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.063416 4863 scope.go:117] "RemoveContainer" containerID="a16d831a6025bf2f98e64be7ee84fb6e1f9b33b8e3d36ac1aae1d28bc429bc6b" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.063081 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.117975 4863 scope.go:117] "RemoveContainer" containerID="1d9bcef128b395537dc26a66c0e28bafbb811c27a48c6e61f930b559a3b524db" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.149761 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.180375 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.188949 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:16:00 crc kubenswrapper[4863]: E1205 08:16:00.189784 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api-log" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.189813 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api-log" Dec 05 08:16:00 crc kubenswrapper[4863]: E1205 08:16:00.189841 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.189850 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.190117 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api-log" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.190155 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" containerName="cinder-api" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.191805 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.196076 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.198201 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.254051 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-config-data\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.254108 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc458e97-df3a-4597-b232-4508ce0a666c-logs\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.254187 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-config-data-custom\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.254231 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.254328 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-scripts\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.254386 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bc458e97-df3a-4597-b232-4508ce0a666c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.254443 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdssj\" (UniqueName: \"kubernetes.io/projected/bc458e97-df3a-4597-b232-4508ce0a666c-kube-api-access-vdssj\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.356674 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-scripts\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.356868 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/bc458e97-df3a-4597-b232-4508ce0a666c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.356954 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdssj\" (UniqueName: \"kubernetes.io/projected/bc458e97-df3a-4597-b232-4508ce0a666c-kube-api-access-vdssj\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.357219 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bc458e97-df3a-4597-b232-4508ce0a666c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.357368 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-config-data\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.357424 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc458e97-df3a-4597-b232-4508ce0a666c-logs\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.357497 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-config-data-custom\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.357530 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.358407 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc458e97-df3a-4597-b232-4508ce0a666c-logs\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.363669 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-config-data-custom\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.366918 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.373869 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-scripts\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.375174 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458e97-df3a-4597-b232-4508ce0a666c-config-data\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.395951 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdssj\" (UniqueName: \"kubernetes.io/projected/bc458e97-df3a-4597-b232-4508ce0a666c-kube-api-access-vdssj\") pod \"cinder-api-0\" (UID: \"bc458e97-df3a-4597-b232-4508ce0a666c\") " pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.516429 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.629303 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2a1d8b4-82aa-4b19-bd61-38074852cbb7" path="/var/lib/kubelet/pods/a2a1d8b4-82aa-4b19-bd61-38074852cbb7/volumes" Dec 05 08:16:00 crc kubenswrapper[4863]: I1205 08:16:00.975294 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:16:01 crc kubenswrapper[4863]: I1205 08:16:01.072226 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bc458e97-df3a-4597-b232-4508ce0a666c","Type":"ContainerStarted","Data":"e1375e63a13410bec4c145f6cb722c5de5de773443d9e61ade7beca73bd62217"} Dec 05 08:16:01 crc kubenswrapper[4863]: I1205 08:16:01.451547 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 08:16:01 crc kubenswrapper[4863]: I1205 08:16:01.454466 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 08:16:01 crc kubenswrapper[4863]: I1205 08:16:01.454608 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 08:16:01 crc kubenswrapper[4863]: I1205 08:16:01.459403 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 08:16:01 crc kubenswrapper[4863]: I1205 08:16:01.741379 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.084421 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bc458e97-df3a-4597-b232-4508ce0a666c","Type":"ContainerStarted","Data":"8891e16788d2fa5cc8dfc6b7ca57d7d87da124a7e2bb447217c03c710a76e560"} Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.084492 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bc458e97-df3a-4597-b232-4508ce0a666c","Type":"ContainerStarted","Data":"708bc402e4a84e8a44c575d40bbdc45d66bdc199b3e40170cb2b74c87115dc6d"} Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.084553 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.084597 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.087495 4863 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.111152 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.111133482 podStartE2EDuration="2.111133482s" podCreationTimestamp="2025-12-05 08:16:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:16:02.103034825 +0000 UTC m=+5389.829031865" watchObservedRunningTime="2025-12-05 08:16:02.111133482 +0000 UTC m=+5389.837130522" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.163888 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.166573 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.168031 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 08:16:02 crc kubenswrapper[4863]: I1205 08:16:02.527290 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 05 08:16:03 crc kubenswrapper[4863]: I1205 08:16:03.094340 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 08:16:04 crc kubenswrapper[4863]: I1205 08:16:04.661193 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 08:16:04 crc kubenswrapper[4863]: I1205 08:16:04.758508 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:16:05 crc kubenswrapper[4863]: I1205 08:16:05.108785 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="cinder-scheduler" containerID="cri-o://334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b" gracePeriod=30 Dec 05 08:16:05 crc kubenswrapper[4863]: I1205 08:16:05.109176 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="probe" containerID="cri-o://10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122" gracePeriod=30 Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.119706 4863 generic.go:334] "Generic (PLEG): container finished" podID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerID="10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122" exitCode=0 Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.119849 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ff9d7d-1e6e-41e5-a780-96842332b945","Type":"ContainerDied","Data":"10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122"} Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.918812 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.979815 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.989569 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data-custom\") pod \"40ff9d7d-1e6e-41e5-a780-96842332b945\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.989628 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-combined-ca-bundle\") pod \"40ff9d7d-1e6e-41e5-a780-96842332b945\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.989719 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ff9d7d-1e6e-41e5-a780-96842332b945-etc-machine-id\") pod \"40ff9d7d-1e6e-41e5-a780-96842332b945\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.990652 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40ff9d7d-1e6e-41e5-a780-96842332b945-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "40ff9d7d-1e6e-41e5-a780-96842332b945" (UID: "40ff9d7d-1e6e-41e5-a780-96842332b945"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.989749 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jjsn\" (UniqueName: \"kubernetes.io/projected/40ff9d7d-1e6e-41e5-a780-96842332b945-kube-api-access-8jjsn\") pod \"40ff9d7d-1e6e-41e5-a780-96842332b945\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.990857 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data\") pod \"40ff9d7d-1e6e-41e5-a780-96842332b945\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.990913 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-scripts\") pod \"40ff9d7d-1e6e-41e5-a780-96842332b945\" (UID: \"40ff9d7d-1e6e-41e5-a780-96842332b945\") " Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.991638 4863 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ff9d7d-1e6e-41e5-a780-96842332b945-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 08:16:06 crc kubenswrapper[4863]: I1205 08:16:06.995337 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "40ff9d7d-1e6e-41e5-a780-96842332b945" (UID: "40ff9d7d-1e6e-41e5-a780-96842332b945"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.021704 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40ff9d7d-1e6e-41e5-a780-96842332b945-kube-api-access-8jjsn" (OuterVolumeSpecName: "kube-api-access-8jjsn") pod "40ff9d7d-1e6e-41e5-a780-96842332b945" (UID: "40ff9d7d-1e6e-41e5-a780-96842332b945"). InnerVolumeSpecName "kube-api-access-8jjsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.021934 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-scripts" (OuterVolumeSpecName: "scripts") pod "40ff9d7d-1e6e-41e5-a780-96842332b945" (UID: "40ff9d7d-1e6e-41e5-a780-96842332b945"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.064619 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40ff9d7d-1e6e-41e5-a780-96842332b945" (UID: "40ff9d7d-1e6e-41e5-a780-96842332b945"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.093660 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.093700 4863 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.093709 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.093719 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jjsn\" (UniqueName: \"kubernetes.io/projected/40ff9d7d-1e6e-41e5-a780-96842332b945-kube-api-access-8jjsn\") on node \"crc\" DevicePath \"\"" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.110349 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data" (OuterVolumeSpecName: "config-data") pod "40ff9d7d-1e6e-41e5-a780-96842332b945" (UID: "40ff9d7d-1e6e-41e5-a780-96842332b945"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.133149 4863 generic.go:334] "Generic (PLEG): container finished" podID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerID="334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b" exitCode=0 Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.133195 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ff9d7d-1e6e-41e5-a780-96842332b945","Type":"ContainerDied","Data":"334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b"} Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.133227 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ff9d7d-1e6e-41e5-a780-96842332b945","Type":"ContainerDied","Data":"96767808a0d8e378b8d89192146f89e39273b795f4a31908acacfef775bacaf5"} Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.133246 4863 scope.go:117] "RemoveContainer" containerID="10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.133393 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.158210 4863 scope.go:117] "RemoveContainer" containerID="334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.166528 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.184996 4863 scope.go:117] "RemoveContainer" containerID="10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.185381 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:16:07 crc kubenswrapper[4863]: E1205 08:16:07.188356 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122\": container with ID starting with 10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122 not found: ID does not exist" containerID="10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.188413 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122"} err="failed to get container status \"10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122\": rpc error: code = NotFound desc = could not find container \"10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122\": container with ID starting with 10af9218e512517f0c4c018b68cb5e495bc932d2f734e212ead9c3a71cff7122 not found: ID does not exist" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.188442 4863 scope.go:117] "RemoveContainer" containerID="334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b" Dec 05 08:16:07 crc kubenswrapper[4863]: E1205 08:16:07.189183 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b\": container with ID starting with 334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b not found: ID does not 
exist" containerID="334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.189230 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b"} err="failed to get container status \"334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b\": rpc error: code = NotFound desc = could not find container \"334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b\": container with ID starting with 334714c4744df6400a96949785a33a0cffdb6b5d8826d837073e464d0da0363b not found: ID does not exist" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.195575 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ff9d7d-1e6e-41e5-a780-96842332b945-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.212756 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:16:07 crc kubenswrapper[4863]: E1205 08:16:07.213163 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="cinder-scheduler" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.213179 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="cinder-scheduler" Dec 05 08:16:07 crc kubenswrapper[4863]: E1205 08:16:07.213207 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="probe" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.213215 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="probe" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.213394 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="cinder-scheduler" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.213434 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" containerName="probe" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.214402 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.216666 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.222980 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.297408 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-scripts\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.297585 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.297649 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-config-data\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.297760 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.297838 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqgf9\" (UniqueName: \"kubernetes.io/projected/b73ecb8a-f56c-4ddf-ae08-a7514506f310-kube-api-access-sqgf9\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.297932 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b73ecb8a-f56c-4ddf-ae08-a7514506f310-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.399907 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-scripts\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.400023 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.400063 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-config-data\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.400104 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.400127 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqgf9\" (UniqueName: \"kubernetes.io/projected/b73ecb8a-f56c-4ddf-ae08-a7514506f310-kube-api-access-sqgf9\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.400168 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b73ecb8a-f56c-4ddf-ae08-a7514506f310-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.400241 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b73ecb8a-f56c-4ddf-ae08-a7514506f310-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.404619 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-scripts\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.404899 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.405772 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.406944 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b73ecb8a-f56c-4ddf-ae08-a7514506f310-config-data\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.416776 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqgf9\" (UniqueName: \"kubernetes.io/projected/b73ecb8a-f56c-4ddf-ae08-a7514506f310-kube-api-access-sqgf9\") pod \"cinder-scheduler-0\" (UID: \"b73ecb8a-f56c-4ddf-ae08-a7514506f310\") " 
pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.530017 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.775353 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 05 08:16:07 crc kubenswrapper[4863]: I1205 08:16:07.980933 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:16:08 crc kubenswrapper[4863]: I1205 08:16:08.149007 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b73ecb8a-f56c-4ddf-ae08-a7514506f310","Type":"ContainerStarted","Data":"b5fe9bb7ceffeae33538b98df1a4fa6472a617c034eae5a8a9214d455ccf9d87"} Dec 05 08:16:08 crc kubenswrapper[4863]: I1205 08:16:08.618039 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40ff9d7d-1e6e-41e5-a780-96842332b945" path="/var/lib/kubelet/pods/40ff9d7d-1e6e-41e5-a780-96842332b945/volumes" Dec 05 08:16:09 crc kubenswrapper[4863]: I1205 08:16:09.166960 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b73ecb8a-f56c-4ddf-ae08-a7514506f310","Type":"ContainerStarted","Data":"5000a09558ffd661e608b830bf906486b3667991ee297f724e2cc8c82b1e39fe"} Dec 05 08:16:09 crc kubenswrapper[4863]: I1205 08:16:09.167021 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b73ecb8a-f56c-4ddf-ae08-a7514506f310","Type":"ContainerStarted","Data":"9baabe61eb942248e9b98860e7436d2a3db84e6de2506153696a81e163f57157"} Dec 05 08:16:09 crc kubenswrapper[4863]: I1205 08:16:09.203780 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.20375114 podStartE2EDuration="2.20375114s" podCreationTimestamp="2025-12-05 08:16:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:16:09.187512366 +0000 UTC m=+5396.913509416" watchObservedRunningTime="2025-12-05 08:16:09.20375114 +0000 UTC m=+5396.929748210" Dec 05 08:16:12 crc kubenswrapper[4863]: I1205 08:16:12.324428 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 08:16:12 crc kubenswrapper[4863]: I1205 08:16:12.530848 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 08:16:17 crc kubenswrapper[4863]: I1205 08:16:17.841098 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 08:17:06 crc kubenswrapper[4863]: I1205 08:17:06.069692 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-24f8-account-create-update-w4k26"] Dec 05 08:17:06 crc kubenswrapper[4863]: I1205 08:17:06.080651 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-jzf6h"] Dec 05 08:17:06 crc kubenswrapper[4863]: I1205 08:17:06.089496 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-24f8-account-create-update-w4k26"] Dec 05 08:17:06 crc kubenswrapper[4863]: I1205 08:17:06.099567 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-jzf6h"] Dec 05 08:17:06 crc kubenswrapper[4863]: I1205 08:17:06.612166 4863 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="4aae0b60-50d8-497f-b764-1fa0999191be" path="/var/lib/kubelet/pods/4aae0b60-50d8-497f-b764-1fa0999191be/volumes" Dec 05 08:17:06 crc kubenswrapper[4863]: I1205 08:17:06.612790 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6159e7ab-c333-4262-a31f-6da90b2cb002" path="/var/lib/kubelet/pods/6159e7ab-c333-4262-a31f-6da90b2cb002/volumes" Dec 05 08:17:17 crc kubenswrapper[4863]: I1205 08:17:17.055701 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-fltbs"] Dec 05 08:17:17 crc kubenswrapper[4863]: I1205 08:17:17.069100 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-fltbs"] Dec 05 08:17:18 crc kubenswrapper[4863]: I1205 08:17:18.616321 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eded1eaa-ee68-40a5-86ef-aa6c45567e01" path="/var/lib/kubelet/pods/eded1eaa-ee68-40a5-86ef-aa6c45567e01/volumes" Dec 05 08:17:31 crc kubenswrapper[4863]: I1205 08:17:31.052004 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-zlpld"] Dec 05 08:17:31 crc kubenswrapper[4863]: I1205 08:17:31.067250 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-zlpld"] Dec 05 08:17:32 crc kubenswrapper[4863]: I1205 08:17:32.613944 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7a59583-e60d-4de4-88c7-7b50a9cb00c4" path="/var/lib/kubelet/pods/c7a59583-e60d-4de4-88c7-7b50a9cb00c4/volumes" Dec 05 08:17:34 crc kubenswrapper[4863]: I1205 08:17:34.758703 4863 scope.go:117] "RemoveContainer" containerID="1df9e5fc2a73d1c4cd83a9aae526fbcd6a091007698645149945fdfb19bc1d93" Dec 05 08:17:34 crc kubenswrapper[4863]: I1205 08:17:34.789380 4863 scope.go:117] "RemoveContainer" containerID="4c19ec8091607f0440ea3d6420243a43ed8f539e9e142f57a14988ddaeaed738" Dec 05 08:17:34 crc kubenswrapper[4863]: I1205 08:17:34.838456 4863 scope.go:117] "RemoveContainer" containerID="d5f76b78768df4699267611050f6c0e5d0b52488b062eff013f3bf98bb6e0266" Dec 05 08:17:34 crc kubenswrapper[4863]: I1205 08:17:34.893938 4863 scope.go:117] "RemoveContainer" containerID="e2405a0988ea44908f1b0509bdf24e66d8657ee21cdfbd05e7449a3a55fa5397" Dec 05 08:17:38 crc kubenswrapper[4863]: I1205 08:17:38.470378 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:17:38 crc kubenswrapper[4863]: I1205 08:17:38.471000 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.579158 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-556d4689cc-twtq8"] Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.586268 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.590502 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.590701 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-czb5c" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.591045 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.591198 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.626362 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-556d4689cc-twtq8"] Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.643952 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.644236 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-log" containerID="cri-o://3fe58194689ca75b14d6cb2b37b19b0755b691b5cd77b6c6d730b789866b478f" gracePeriod=30 Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.644826 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-httpd" containerID="cri-o://1b7fc830435442fcca8fdaa12a2fe860cf52dfd7d24d2bc3a015e88c10f48366" gracePeriod=30 Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.676212 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-config-data\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.676604 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pjpv\" (UniqueName: \"kubernetes.io/projected/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-kube-api-access-2pjpv\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.676760 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-scripts\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.677092 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-horizon-secret-key\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.677236 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-logs\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.688826 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b6d5d985c-2q79b"] Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.690452 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.707429 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b6d5d985c-2q79b"] Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.751563 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.751780 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-log" containerID="cri-o://04323d52a4e25f7e74be5f021c0efcf981dc57e74f9da5997135290aaa2dbcf5" gracePeriod=30 Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.752105 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-httpd" containerID="cri-o://4e810214e6b7402ed65327bc66e35e74b3c9a7e8f5a8d84e09e9a04ed0a1e162" gracePeriod=30 Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782150 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-horizon-secret-key\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782411 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-logs\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782442 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391ee1e8-554d-4025-af59-dd0411eb7216-logs\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782520 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-config-data\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782546 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pjpv\" (UniqueName: \"kubernetes.io/projected/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-kube-api-access-2pjpv\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782601 4863 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/391ee1e8-554d-4025-af59-dd0411eb7216-horizon-secret-key\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782645 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-scripts\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782714 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-scripts\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782736 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-config-data\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.782765 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq54m\" (UniqueName: \"kubernetes.io/projected/391ee1e8-554d-4025-af59-dd0411eb7216-kube-api-access-tq54m\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.785725 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-logs\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.786172 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-scripts\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.786839 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-config-data\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.791825 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-horizon-secret-key\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.809132 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pjpv\" (UniqueName: 
\"kubernetes.io/projected/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-kube-api-access-2pjpv\") pod \"horizon-556d4689cc-twtq8\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.884805 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391ee1e8-554d-4025-af59-dd0411eb7216-logs\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.884898 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/391ee1e8-554d-4025-af59-dd0411eb7216-horizon-secret-key\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.884946 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-scripts\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.884962 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-config-data\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.884987 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq54m\" (UniqueName: \"kubernetes.io/projected/391ee1e8-554d-4025-af59-dd0411eb7216-kube-api-access-tq54m\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.885224 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391ee1e8-554d-4025-af59-dd0411eb7216-logs\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.885795 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-scripts\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.886334 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-config-data\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.889045 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/391ee1e8-554d-4025-af59-dd0411eb7216-horizon-secret-key\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" 
Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.900962 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq54m\" (UniqueName: \"kubernetes.io/projected/391ee1e8-554d-4025-af59-dd0411eb7216-kube-api-access-tq54m\") pod \"horizon-7b6d5d985c-2q79b\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:50 crc kubenswrapper[4863]: I1205 08:17:50.912246 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.043668 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.220058 4863 generic.go:334] "Generic (PLEG): container finished" podID="60e25931-63ca-4875-b797-b235d731d4e0" containerID="04323d52a4e25f7e74be5f021c0efcf981dc57e74f9da5997135290aaa2dbcf5" exitCode=143 Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.220098 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60e25931-63ca-4875-b797-b235d731d4e0","Type":"ContainerDied","Data":"04323d52a4e25f7e74be5f021c0efcf981dc57e74f9da5997135290aaa2dbcf5"} Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.222431 4863 generic.go:334] "Generic (PLEG): container finished" podID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerID="3fe58194689ca75b14d6cb2b37b19b0755b691b5cd77b6c6d730b789866b478f" exitCode=143 Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.222492 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1","Type":"ContainerDied","Data":"3fe58194689ca75b14d6cb2b37b19b0755b691b5cd77b6c6d730b789866b478f"} Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.376637 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-556d4689cc-twtq8"] Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.381107 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.500150 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b6d5d985c-2q79b"] Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.521125 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-556d4689cc-twtq8"] Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.549734 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-68d5d5cd95-xjd79"] Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.551241 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.570229 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-68d5d5cd95-xjd79"] Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.705294 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-config-data\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.705603 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-scripts\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.705649 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1122499e-4014-4f82-a841-09f6417b7bb2-horizon-secret-key\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.705713 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk25h\" (UniqueName: \"kubernetes.io/projected/1122499e-4014-4f82-a841-09f6417b7bb2-kube-api-access-bk25h\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.708185 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1122499e-4014-4f82-a841-09f6417b7bb2-logs\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.811012 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk25h\" (UniqueName: \"kubernetes.io/projected/1122499e-4014-4f82-a841-09f6417b7bb2-kube-api-access-bk25h\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.811356 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1122499e-4014-4f82-a841-09f6417b7bb2-logs\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.811439 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-config-data\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.811464 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-scripts\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.811549 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1122499e-4014-4f82-a841-09f6417b7bb2-horizon-secret-key\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.814016 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1122499e-4014-4f82-a841-09f6417b7bb2-logs\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.814996 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-scripts\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.815488 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-config-data\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.822775 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1122499e-4014-4f82-a841-09f6417b7bb2-horizon-secret-key\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.834787 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk25h\" (UniqueName: \"kubernetes.io/projected/1122499e-4014-4f82-a841-09f6417b7bb2-kube-api-access-bk25h\") pod \"horizon-68d5d5cd95-xjd79\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:51 crc kubenswrapper[4863]: I1205 08:17:51.882732 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:17:52 crc kubenswrapper[4863]: I1205 08:17:52.233686 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6d5d985c-2q79b" event={"ID":"391ee1e8-554d-4025-af59-dd0411eb7216","Type":"ContainerStarted","Data":"19bb88f7a24737c863b5c11346bee650e6da693472b077f48b8f5a5b2366ea6a"} Dec 05 08:17:52 crc kubenswrapper[4863]: I1205 08:17:52.234990 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-556d4689cc-twtq8" event={"ID":"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e","Type":"ContainerStarted","Data":"08ef7c16e964982598b149ed0de9d4cffa878aa4d1885da6eb23b6ae1d5276b1"} Dec 05 08:17:52 crc kubenswrapper[4863]: I1205 08:17:52.326973 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-68d5d5cd95-xjd79"] Dec 05 08:17:52 crc kubenswrapper[4863]: W1205 08:17:52.339217 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1122499e_4014_4f82_a841_09f6417b7bb2.slice/crio-21401449d7b649d6d6e0ad00d349e129efe93493c1a55197a72c9aad0f4c8a3c WatchSource:0}: Error finding container 21401449d7b649d6d6e0ad00d349e129efe93493c1a55197a72c9aad0f4c8a3c: Status 404 returned error can't find the container with id 21401449d7b649d6d6e0ad00d349e129efe93493c1a55197a72c9aad0f4c8a3c Dec 05 08:17:53 crc kubenswrapper[4863]: I1205 08:17:53.248790 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68d5d5cd95-xjd79" event={"ID":"1122499e-4014-4f82-a841-09f6417b7bb2","Type":"ContainerStarted","Data":"21401449d7b649d6d6e0ad00d349e129efe93493c1a55197a72c9aad0f4c8a3c"} Dec 05 08:17:54 crc kubenswrapper[4863]: I1205 08:17:54.263841 4863 generic.go:334] "Generic (PLEG): container finished" podID="60e25931-63ca-4875-b797-b235d731d4e0" containerID="4e810214e6b7402ed65327bc66e35e74b3c9a7e8f5a8d84e09e9a04ed0a1e162" exitCode=0 Dec 05 08:17:54 crc kubenswrapper[4863]: I1205 08:17:54.263930 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60e25931-63ca-4875-b797-b235d731d4e0","Type":"ContainerDied","Data":"4e810214e6b7402ed65327bc66e35e74b3c9a7e8f5a8d84e09e9a04ed0a1e162"} Dec 05 08:17:55 crc kubenswrapper[4863]: I1205 08:17:55.277847 4863 generic.go:334] "Generic (PLEG): container finished" podID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerID="1b7fc830435442fcca8fdaa12a2fe860cf52dfd7d24d2bc3a015e88c10f48366" exitCode=0 Dec 05 08:17:55 crc kubenswrapper[4863]: I1205 08:17:55.277937 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1","Type":"ContainerDied","Data":"1b7fc830435442fcca8fdaa12a2fe860cf52dfd7d24d2bc3a015e88c10f48366"} Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.524277 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.534209 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.682798 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-combined-ca-bundle\") pod \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.682859 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-httpd-run\") pod \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.682950 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-config-data\") pod \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683058 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-logs\") pod \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683088 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-ceph\") pod \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683111 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-scripts\") pod \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683133 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgjz5\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-kube-api-access-cgjz5\") pod \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\" (UID: \"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683188 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-logs\") pod \"60e25931-63ca-4875-b797-b235d731d4e0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683256 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-combined-ca-bundle\") pod \"60e25931-63ca-4875-b797-b235d731d4e0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683289 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-scripts\") pod \"60e25931-63ca-4875-b797-b235d731d4e0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " Dec 05 08:18:00 crc 
kubenswrapper[4863]: I1205 08:18:00.683325 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck6dg\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-kube-api-access-ck6dg\") pod \"60e25931-63ca-4875-b797-b235d731d4e0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683358 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-config-data\") pod \"60e25931-63ca-4875-b797-b235d731d4e0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683422 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-httpd-run\") pod \"60e25931-63ca-4875-b797-b235d731d4e0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683436 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" (UID: "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.683507 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-ceph\") pod \"60e25931-63ca-4875-b797-b235d731d4e0\" (UID: \"60e25931-63ca-4875-b797-b235d731d4e0\") " Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.684024 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.684090 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-logs" (OuterVolumeSpecName: "logs") pod "60e25931-63ca-4875-b797-b235d731d4e0" (UID: "60e25931-63ca-4875-b797-b235d731d4e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.684353 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-logs" (OuterVolumeSpecName: "logs") pod "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" (UID: "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.686358 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "60e25931-63ca-4875-b797-b235d731d4e0" (UID: "60e25931-63ca-4875-b797-b235d731d4e0"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.688843 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-scripts" (OuterVolumeSpecName: "scripts") pod "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" (UID: "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.689533 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-ceph" (OuterVolumeSpecName: "ceph") pod "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" (UID: "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.695241 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-scripts" (OuterVolumeSpecName: "scripts") pod "60e25931-63ca-4875-b797-b235d731d4e0" (UID: "60e25931-63ca-4875-b797-b235d731d4e0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.714640 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-kube-api-access-ck6dg" (OuterVolumeSpecName: "kube-api-access-ck6dg") pod "60e25931-63ca-4875-b797-b235d731d4e0" (UID: "60e25931-63ca-4875-b797-b235d731d4e0"). InnerVolumeSpecName "kube-api-access-ck6dg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.726975 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-ceph" (OuterVolumeSpecName: "ceph") pod "60e25931-63ca-4875-b797-b235d731d4e0" (UID: "60e25931-63ca-4875-b797-b235d731d4e0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.735953 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-kube-api-access-cgjz5" (OuterVolumeSpecName: "kube-api-access-cgjz5") pod "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" (UID: "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1"). InnerVolumeSpecName "kube-api-access-cgjz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.752448 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60e25931-63ca-4875-b797-b235d731d4e0" (UID: "60e25931-63ca-4875-b797-b235d731d4e0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790889 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790922 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790935 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790945 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790956 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgjz5\" (UniqueName: \"kubernetes.io/projected/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-kube-api-access-cgjz5\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790968 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790981 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.790990 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.791000 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck6dg\" (UniqueName: \"kubernetes.io/projected/60e25931-63ca-4875-b797-b235d731d4e0-kube-api-access-ck6dg\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.791009 4863 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60e25931-63ca-4875-b797-b235d731d4e0-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.793140 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" (UID: "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.816432 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-config-data" (OuterVolumeSpecName: "config-data") pod "60e25931-63ca-4875-b797-b235d731d4e0" (UID: "60e25931-63ca-4875-b797-b235d731d4e0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.819301 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-config-data" (OuterVolumeSpecName: "config-data") pod "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" (UID: "b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.892971 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60e25931-63ca-4875-b797-b235d731d4e0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.893001 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:00 crc kubenswrapper[4863]: I1205 08:18:00.893015 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.332715 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60e25931-63ca-4875-b797-b235d731d4e0","Type":"ContainerDied","Data":"b9ac35a9de351187f97e3eaa6733734c2ec440e3b830595c2cff2244969af543"} Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.332758 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.332785 4863 scope.go:117] "RemoveContainer" containerID="4e810214e6b7402ed65327bc66e35e74b3c9a7e8f5a8d84e09e9a04ed0a1e162" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.336578 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1","Type":"ContainerDied","Data":"e8f9bdce3112713fb501bbe6cda764fb04c691b84e96f6688cb25ca0a6010a87"} Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.336739 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.363024 4863 scope.go:117] "RemoveContainer" containerID="04323d52a4e25f7e74be5f021c0efcf981dc57e74f9da5997135290aaa2dbcf5" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.379322 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.389813 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.399744 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.411495 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432134 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: E1205 08:18:01.432625 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-log" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432642 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-log" Dec 05 08:18:01 crc kubenswrapper[4863]: E1205 08:18:01.432666 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-httpd" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432674 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-httpd" Dec 05 08:18:01 crc kubenswrapper[4863]: E1205 08:18:01.432692 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-httpd" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432699 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-httpd" Dec 05 08:18:01 crc kubenswrapper[4863]: E1205 08:18:01.432716 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-log" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432723 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-log" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432952 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-log" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432972 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="60e25931-63ca-4875-b797-b235d731d4e0" containerName="glance-httpd" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432984 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-httpd" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.432997 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" containerName="glance-log" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.434106 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.442687 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.442724 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-pkhn6" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.442968 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.443647 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.454110 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.456290 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.459493 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.465708 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.467596 4863 scope.go:117] "RemoveContainer" containerID="1b7fc830435442fcca8fdaa12a2fe860cf52dfd7d24d2bc3a015e88c10f48366" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.502555 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-config-data\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.502670 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.502720 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.502858 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-scripts\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.502938 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-ceph\") pod \"glance-default-external-api-0\" (UID: 
\"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.502963 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-logs\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.503076 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp2t2\" (UniqueName: \"kubernetes.io/projected/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-kube-api-access-fp2t2\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.520683 4863 scope.go:117] "RemoveContainer" containerID="3fe58194689ca75b14d6cb2b37b19b0755b691b5cd77b6c6d730b789866b478f" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.605433 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606037 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606084 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606134 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-config-data\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606166 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606187 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-scripts\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606229 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-ceph\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606394 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-logs\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606438 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-scripts\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606529 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-logs\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606579 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp2t2\" (UniqueName: \"kubernetes.io/projected/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-kube-api-access-fp2t2\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606732 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6gxm\" (UniqueName: \"kubernetes.io/projected/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-kube-api-access-c6gxm\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606766 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-ceph\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.606819 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-config-data\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.608878 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.608992 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-logs\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.612579 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-ceph\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.614105 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-config-data\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.615513 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-scripts\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.616180 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.627199 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp2t2\" (UniqueName: \"kubernetes.io/projected/a65d4bd4-3e15-4293-a421-bbc89fbbdab8-kube-api-access-fp2t2\") pod \"glance-default-external-api-0\" (UID: \"a65d4bd4-3e15-4293-a421-bbc89fbbdab8\") " pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.708692 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.708760 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-config-data\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.708779 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.709404 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.709764 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.709831 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-logs\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.709882 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6gxm\" (UniqueName: \"kubernetes.io/projected/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-kube-api-access-c6gxm\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.709911 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-ceph\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.711650 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-logs\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.712551 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-scripts\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.713864 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.715262 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-ceph\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.715996 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-config-data\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.726772 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6gxm\" (UniqueName: \"kubernetes.io/projected/87e9e170-a556-46aa-8d0a-b0b1fb6328ab-kube-api-access-c6gxm\") pod \"glance-default-internal-api-0\" (UID: \"87e9e170-a556-46aa-8d0a-b0b1fb6328ab\") " pod="openstack/glance-default-internal-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.824992 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 08:18:01 crc kubenswrapper[4863]: I1205 08:18:01.831855 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.351857 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68d5d5cd95-xjd79" event={"ID":"1122499e-4014-4f82-a841-09f6417b7bb2","Type":"ContainerStarted","Data":"7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80"} Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.352094 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68d5d5cd95-xjd79" event={"ID":"1122499e-4014-4f82-a841-09f6417b7bb2","Type":"ContainerStarted","Data":"5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da"} Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.356939 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.359182 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6d5d985c-2q79b" event={"ID":"391ee1e8-554d-4025-af59-dd0411eb7216","Type":"ContainerStarted","Data":"2976ad557659eeb945451f7e5b5be07e3ed642bf495a821731461ea3ff64f5c1"} Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.359223 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6d5d985c-2q79b" event={"ID":"391ee1e8-554d-4025-af59-dd0411eb7216","Type":"ContainerStarted","Data":"6f71043dac5ebbdba45bb57b521ab6a3c7869c12c368df8c5b80d3f614ba6efc"} Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.364190 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-556d4689cc-twtq8" event={"ID":"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e","Type":"ContainerStarted","Data":"65220b63d7a8f6135da7d34f395eca4c42700fa837d7dbddce19ead134c7a9db"} Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.364225 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-556d4689cc-twtq8" event={"ID":"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e","Type":"ContainerStarted","Data":"694a4ab2887f45d93ce5f192f3edd32889f73433d2c914987e79a406ddf26611"} Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.364317 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-556d4689cc-twtq8" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon-log" containerID="cri-o://694a4ab2887f45d93ce5f192f3edd32889f73433d2c914987e79a406ddf26611" gracePeriod=30 Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.364392 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-556d4689cc-twtq8" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon" containerID="cri-o://65220b63d7a8f6135da7d34f395eca4c42700fa837d7dbddce19ead134c7a9db" gracePeriod=30 Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.373817 4863 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/horizon-68d5d5cd95-xjd79" podStartSLOduration=3.221885749 podStartE2EDuration="11.373800439s" podCreationTimestamp="2025-12-05 08:17:51 +0000 UTC" firstStartedPulling="2025-12-05 08:17:52.341400751 +0000 UTC m=+5500.067397781" lastFinishedPulling="2025-12-05 08:18:00.493315431 +0000 UTC m=+5508.219312471" observedRunningTime="2025-12-05 08:18:02.372876246 +0000 UTC m=+5510.098873316" watchObservedRunningTime="2025-12-05 08:18:02.373800439 +0000 UTC m=+5510.099797479" Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.397987 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b6d5d985c-2q79b" podStartSLOduration=3.359797781 podStartE2EDuration="12.397964125s" podCreationTimestamp="2025-12-05 08:17:50 +0000 UTC" firstStartedPulling="2025-12-05 08:17:51.500275411 +0000 UTC m=+5499.226272451" lastFinishedPulling="2025-12-05 08:18:00.538441755 +0000 UTC m=+5508.264438795" observedRunningTime="2025-12-05 08:18:02.392490232 +0000 UTC m=+5510.118487292" watchObservedRunningTime="2025-12-05 08:18:02.397964125 +0000 UTC m=+5510.123961165" Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.420287 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-556d4689cc-twtq8" podStartSLOduration=3.207788154 podStartE2EDuration="12.420270567s" podCreationTimestamp="2025-12-05 08:17:50 +0000 UTC" firstStartedPulling="2025-12-05 08:17:51.380891955 +0000 UTC m=+5499.106888985" lastFinishedPulling="2025-12-05 08:18:00.593374358 +0000 UTC m=+5508.319371398" observedRunningTime="2025-12-05 08:18:02.41711415 +0000 UTC m=+5510.143111190" watchObservedRunningTime="2025-12-05 08:18:02.420270567 +0000 UTC m=+5510.146267607" Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.474744 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 08:18:02 crc kubenswrapper[4863]: W1205 08:18:02.476688 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda65d4bd4_3e15_4293_a421_bbc89fbbdab8.slice/crio-04c97b4c40e850240e08c7ff41acf14ff66f3cb307af4d91e746e3f08d175761 WatchSource:0}: Error finding container 04c97b4c40e850240e08c7ff41acf14ff66f3cb307af4d91e746e3f08d175761: Status 404 returned error can't find the container with id 04c97b4c40e850240e08c7ff41acf14ff66f3cb307af4d91e746e3f08d175761 Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.611043 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60e25931-63ca-4875-b797-b235d731d4e0" path="/var/lib/kubelet/pods/60e25931-63ca-4875-b797-b235d731d4e0/volumes" Dec 05 08:18:02 crc kubenswrapper[4863]: I1205 08:18:02.611974 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1" path="/var/lib/kubelet/pods/b8fb13c4-b688-4f81-a02a-f7c5b10aa4e1/volumes" Dec 05 08:18:03 crc kubenswrapper[4863]: I1205 08:18:03.388247 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a65d4bd4-3e15-4293-a421-bbc89fbbdab8","Type":"ContainerStarted","Data":"6edabcccf810eb713b5553ea9066f4e5633c8f15d58db879dceae7f9eba80697"} Dec 05 08:18:03 crc kubenswrapper[4863]: I1205 08:18:03.388617 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"a65d4bd4-3e15-4293-a421-bbc89fbbdab8","Type":"ContainerStarted","Data":"04c97b4c40e850240e08c7ff41acf14ff66f3cb307af4d91e746e3f08d175761"} Dec 05 08:18:03 crc kubenswrapper[4863]: I1205 08:18:03.390398 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"87e9e170-a556-46aa-8d0a-b0b1fb6328ab","Type":"ContainerStarted","Data":"e10908082b0e847bf2289b56f448bf25342114c9602090a5c08a7b05fedcd121"} Dec 05 08:18:03 crc kubenswrapper[4863]: I1205 08:18:03.390463 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"87e9e170-a556-46aa-8d0a-b0b1fb6328ab","Type":"ContainerStarted","Data":"a6e12f1a938abd2817508e19946e9df5b37d5a739221ed33b1373afb0100d9da"} Dec 05 08:18:04 crc kubenswrapper[4863]: I1205 08:18:04.398489 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"87e9e170-a556-46aa-8d0a-b0b1fb6328ab","Type":"ContainerStarted","Data":"e6d52152ec75f51a876aaad9ac54f08db026ac5c19c435192df0085a55e291c8"} Dec 05 08:18:04 crc kubenswrapper[4863]: I1205 08:18:04.400906 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a65d4bd4-3e15-4293-a421-bbc89fbbdab8","Type":"ContainerStarted","Data":"7b4dcae306107e172d33f0a5ce81920c49a0f2848e0f29ec81218e9b58ef5f00"} Dec 05 08:18:04 crc kubenswrapper[4863]: I1205 08:18:04.455456 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.455432729 podStartE2EDuration="3.455432729s" podCreationTimestamp="2025-12-05 08:18:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:18:04.443202252 +0000 UTC m=+5512.169199302" watchObservedRunningTime="2025-12-05 08:18:04.455432729 +0000 UTC m=+5512.181429789" Dec 05 08:18:04 crc kubenswrapper[4863]: I1205 08:18:04.460058 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.4600412 podStartE2EDuration="3.4600412s" podCreationTimestamp="2025-12-05 08:18:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:18:04.423147635 +0000 UTC m=+5512.149144685" watchObservedRunningTime="2025-12-05 08:18:04.4600412 +0000 UTC m=+5512.186038250" Dec 05 08:18:08 crc kubenswrapper[4863]: I1205 08:18:08.464820 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:18:08 crc kubenswrapper[4863]: I1205 08:18:08.465398 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:18:10 crc kubenswrapper[4863]: I1205 08:18:10.913196 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.044652 4863 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.044703 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.825835 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.825925 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.833312 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.833380 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.869554 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.875534 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.884825 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.884947 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.892097 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 08:18:11 crc kubenswrapper[4863]: I1205 08:18:11.892246 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:12 crc kubenswrapper[4863]: I1205 08:18:12.469977 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 08:18:12 crc kubenswrapper[4863]: I1205 08:18:12.470080 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:12 crc kubenswrapper[4863]: I1205 08:18:12.470099 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 08:18:12 crc kubenswrapper[4863]: I1205 08:18:12.470110 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:14 crc kubenswrapper[4863]: I1205 08:18:14.923769 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 08:18:14 crc kubenswrapper[4863]: I1205 08:18:14.924144 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 08:18:14 crc kubenswrapper[4863]: I1205 08:18:14.940836 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 08:18:15 crc kubenswrapper[4863]: I1205 08:18:15.023826 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:15 crc kubenswrapper[4863]: 
I1205 08:18:15.024814 4863 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 08:18:15 crc kubenswrapper[4863]: I1205 08:18:15.026653 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 08:18:21 crc kubenswrapper[4863]: I1205 08:18:21.046037 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b6d5d985c-2q79b" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.86:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.86:8080: connect: connection refused" Dec 05 08:18:21 crc kubenswrapper[4863]: I1205 08:18:21.884825 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-68d5d5cd95-xjd79" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.87:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.87:8080: connect: connection refused" Dec 05 08:18:32 crc kubenswrapper[4863]: I1205 08:18:32.677582 4863 generic.go:334] "Generic (PLEG): container finished" podID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerID="65220b63d7a8f6135da7d34f395eca4c42700fa837d7dbddce19ead134c7a9db" exitCode=137 Dec 05 08:18:32 crc kubenswrapper[4863]: I1205 08:18:32.677987 4863 generic.go:334] "Generic (PLEG): container finished" podID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerID="694a4ab2887f45d93ce5f192f3edd32889f73433d2c914987e79a406ddf26611" exitCode=137 Dec 05 08:18:32 crc kubenswrapper[4863]: I1205 08:18:32.677799 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-556d4689cc-twtq8" event={"ID":"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e","Type":"ContainerDied","Data":"65220b63d7a8f6135da7d34f395eca4c42700fa837d7dbddce19ead134c7a9db"} Dec 05 08:18:32 crc kubenswrapper[4863]: I1205 08:18:32.678024 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-556d4689cc-twtq8" event={"ID":"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e","Type":"ContainerDied","Data":"694a4ab2887f45d93ce5f192f3edd32889f73433d2c914987e79a406ddf26611"} Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.091597 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.197099 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pjpv\" (UniqueName: \"kubernetes.io/projected/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-kube-api-access-2pjpv\") pod \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.197327 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-logs\") pod \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.197376 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-scripts\") pod \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.197414 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-config-data\") pod \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.197432 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-horizon-secret-key\") pod \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\" (UID: \"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e\") " Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.198422 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-logs" (OuterVolumeSpecName: "logs") pod "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" (UID: "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.205821 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-kube-api-access-2pjpv" (OuterVolumeSpecName: "kube-api-access-2pjpv") pod "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" (UID: "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e"). InnerVolumeSpecName "kube-api-access-2pjpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.212690 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" (UID: "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.230283 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-scripts" (OuterVolumeSpecName: "scripts") pod "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" (UID: "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.230608 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-config-data" (OuterVolumeSpecName: "config-data") pod "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" (UID: "bf2c0a5a-3643-4a95-8ac3-75b27a8c989e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.299636 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.299678 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.299695 4863 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.299709 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pjpv\" (UniqueName: \"kubernetes.io/projected/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-kube-api-access-2pjpv\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.299722 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.331681 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.688302 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-556d4689cc-twtq8" event={"ID":"bf2c0a5a-3643-4a95-8ac3-75b27a8c989e","Type":"ContainerDied","Data":"08ef7c16e964982598b149ed0de9d4cffa878aa4d1885da6eb23b6ae1d5276b1"} Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.688727 4863 scope.go:117] "RemoveContainer" containerID="65220b63d7a8f6135da7d34f395eca4c42700fa837d7dbddce19ead134c7a9db" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.688505 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-556d4689cc-twtq8" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.728112 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-556d4689cc-twtq8"] Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.738583 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-556d4689cc-twtq8"] Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.840227 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:18:33 crc kubenswrapper[4863]: I1205 08:18:33.876049 4863 scope.go:117] "RemoveContainer" containerID="694a4ab2887f45d93ce5f192f3edd32889f73433d2c914987e79a406ddf26611" Dec 05 08:18:34 crc kubenswrapper[4863]: I1205 08:18:34.617226 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" path="/var/lib/kubelet/pods/bf2c0a5a-3643-4a95-8ac3-75b27a8c989e/volumes" Dec 05 08:18:35 crc kubenswrapper[4863]: I1205 08:18:35.004451 4863 scope.go:117] "RemoveContainer" containerID="45fb708c2a9c45d69560c253f03ec7d2db3454fa73adbea056da3fac66acaa59" Dec 05 08:18:35 crc kubenswrapper[4863]: I1205 08:18:35.030346 4863 scope.go:117] "RemoveContainer" containerID="be8e0b8ea7e2b7ec3fc5170796e2738a6dbf2266f87ce31e6ee9fe19634c8d13" Dec 05 08:18:35 crc kubenswrapper[4863]: I1205 08:18:35.131249 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:18:35 crc kubenswrapper[4863]: I1205 08:18:35.649920 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:18:35 crc kubenswrapper[4863]: I1205 08:18:35.717401 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b6d5d985c-2q79b"] Dec 05 08:18:35 crc kubenswrapper[4863]: I1205 08:18:35.717705 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b6d5d985c-2q79b" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon-log" containerID="cri-o://6f71043dac5ebbdba45bb57b521ab6a3c7869c12c368df8c5b80d3f614ba6efc" gracePeriod=30 Dec 05 08:18:35 crc kubenswrapper[4863]: I1205 08:18:35.717825 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b6d5d985c-2q79b" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" containerID="cri-o://2976ad557659eeb945451f7e5b5be07e3ed642bf495a821731461ea3ff64f5c1" gracePeriod=30 Dec 05 08:18:38 crc kubenswrapper[4863]: I1205 08:18:38.464352 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:18:38 crc kubenswrapper[4863]: I1205 08:18:38.464980 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:18:38 crc kubenswrapper[4863]: I1205 08:18:38.465027 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:18:38 crc 
kubenswrapper[4863]: I1205 08:18:38.465922 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eee869fb7e6f374e18166f19582d12b2fb32088b174d35edb03957c7b461d2cb"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:18:38 crc kubenswrapper[4863]: I1205 08:18:38.465987 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://eee869fb7e6f374e18166f19582d12b2fb32088b174d35edb03957c7b461d2cb" gracePeriod=600 Dec 05 08:18:38 crc kubenswrapper[4863]: I1205 08:18:38.759872 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="eee869fb7e6f374e18166f19582d12b2fb32088b174d35edb03957c7b461d2cb" exitCode=0 Dec 05 08:18:38 crc kubenswrapper[4863]: I1205 08:18:38.759917 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"eee869fb7e6f374e18166f19582d12b2fb32088b174d35edb03957c7b461d2cb"} Dec 05 08:18:38 crc kubenswrapper[4863]: I1205 08:18:38.759957 4863 scope.go:117] "RemoveContainer" containerID="d6710886466178dc0470fcdbc88bdc71f14ae22ee73a005e9e8f74b7c48a80f2" Dec 05 08:18:39 crc kubenswrapper[4863]: I1205 08:18:39.769304 4863 generic.go:334] "Generic (PLEG): container finished" podID="391ee1e8-554d-4025-af59-dd0411eb7216" containerID="2976ad557659eeb945451f7e5b5be07e3ed642bf495a821731461ea3ff64f5c1" exitCode=0 Dec 05 08:18:39 crc kubenswrapper[4863]: I1205 08:18:39.769392 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6d5d985c-2q79b" event={"ID":"391ee1e8-554d-4025-af59-dd0411eb7216","Type":"ContainerDied","Data":"2976ad557659eeb945451f7e5b5be07e3ed642bf495a821731461ea3ff64f5c1"} Dec 05 08:18:39 crc kubenswrapper[4863]: I1205 08:18:39.772118 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c"} Dec 05 08:18:41 crc kubenswrapper[4863]: I1205 08:18:41.045202 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b6d5d985c-2q79b" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.86:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.86:8080: connect: connection refused" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.774341 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t76ls"] Dec 05 08:18:46 crc kubenswrapper[4863]: E1205 08:18:46.775302 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.775319 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon" Dec 05 08:18:46 crc kubenswrapper[4863]: E1205 08:18:46.775349 4863 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon-log" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.775357 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon-log" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.775603 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.775621 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2c0a5a-3643-4a95-8ac3-75b27a8c989e" containerName="horizon-log" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.778056 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.797304 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t76ls"] Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.848121 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-utilities\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.848312 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkdq5\" (UniqueName: \"kubernetes.io/projected/dc238dab-6546-44b1-b74c-ddf6c894c46e-kube-api-access-lkdq5\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.848392 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-catalog-content\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.949791 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-utilities\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.949884 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkdq5\" (UniqueName: \"kubernetes.io/projected/dc238dab-6546-44b1-b74c-ddf6c894c46e-kube-api-access-lkdq5\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.949913 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-catalog-content\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.950426 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-catalog-content\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.950456 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-utilities\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:46 crc kubenswrapper[4863]: I1205 08:18:46.976803 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkdq5\" (UniqueName: \"kubernetes.io/projected/dc238dab-6546-44b1-b74c-ddf6c894c46e-kube-api-access-lkdq5\") pod \"redhat-marketplace-t76ls\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:47 crc kubenswrapper[4863]: I1205 08:18:47.122968 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:47 crc kubenswrapper[4863]: I1205 08:18:47.550477 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t76ls"] Dec 05 08:18:47 crc kubenswrapper[4863]: I1205 08:18:47.846367 4863 generic.go:334] "Generic (PLEG): container finished" podID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerID="54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e" exitCode=0 Dec 05 08:18:47 crc kubenswrapper[4863]: I1205 08:18:47.846427 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t76ls" event={"ID":"dc238dab-6546-44b1-b74c-ddf6c894c46e","Type":"ContainerDied","Data":"54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e"} Dec 05 08:18:47 crc kubenswrapper[4863]: I1205 08:18:47.846674 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t76ls" event={"ID":"dc238dab-6546-44b1-b74c-ddf6c894c46e","Type":"ContainerStarted","Data":"83e0128a5baa5584faab44607647456784d2ba19c2904722cf20cb6af82d6243"} Dec 05 08:18:48 crc kubenswrapper[4863]: I1205 08:18:48.858110 4863 generic.go:334] "Generic (PLEG): container finished" podID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerID="acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5" exitCode=0 Dec 05 08:18:48 crc kubenswrapper[4863]: I1205 08:18:48.858224 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t76ls" event={"ID":"dc238dab-6546-44b1-b74c-ddf6c894c46e","Type":"ContainerDied","Data":"acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5"} Dec 05 08:18:49 crc kubenswrapper[4863]: I1205 08:18:49.868605 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t76ls" event={"ID":"dc238dab-6546-44b1-b74c-ddf6c894c46e","Type":"ContainerStarted","Data":"c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae"} Dec 05 08:18:49 crc kubenswrapper[4863]: I1205 08:18:49.887201 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t76ls" podStartSLOduration=2.438608518 podStartE2EDuration="3.887187687s" podCreationTimestamp="2025-12-05 08:18:46 +0000 UTC" 
firstStartedPulling="2025-12-05 08:18:47.848359595 +0000 UTC m=+5555.574356635" lastFinishedPulling="2025-12-05 08:18:49.296938754 +0000 UTC m=+5557.022935804" observedRunningTime="2025-12-05 08:18:49.883031566 +0000 UTC m=+5557.609028616" watchObservedRunningTime="2025-12-05 08:18:49.887187687 +0000 UTC m=+5557.613184727" Dec 05 08:18:51 crc kubenswrapper[4863]: I1205 08:18:51.045788 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b6d5d985c-2q79b" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.86:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.86:8080: connect: connection refused" Dec 05 08:18:57 crc kubenswrapper[4863]: I1205 08:18:57.124605 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:57 crc kubenswrapper[4863]: I1205 08:18:57.125182 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:57 crc kubenswrapper[4863]: I1205 08:18:57.168859 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:57 crc kubenswrapper[4863]: I1205 08:18:57.988311 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:18:58 crc kubenswrapper[4863]: I1205 08:18:58.042175 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t76ls"] Dec 05 08:18:59 crc kubenswrapper[4863]: I1205 08:18:59.958565 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-t76ls" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="registry-server" containerID="cri-o://c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae" gracePeriod=2 Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.470535 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.608552 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-catalog-content\") pod \"dc238dab-6546-44b1-b74c-ddf6c894c46e\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.608654 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-utilities\") pod \"dc238dab-6546-44b1-b74c-ddf6c894c46e\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.608691 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkdq5\" (UniqueName: \"kubernetes.io/projected/dc238dab-6546-44b1-b74c-ddf6c894c46e-kube-api-access-lkdq5\") pod \"dc238dab-6546-44b1-b74c-ddf6c894c46e\" (UID: \"dc238dab-6546-44b1-b74c-ddf6c894c46e\") " Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.609333 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-utilities" (OuterVolumeSpecName: "utilities") pod "dc238dab-6546-44b1-b74c-ddf6c894c46e" (UID: "dc238dab-6546-44b1-b74c-ddf6c894c46e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.610161 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.616944 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc238dab-6546-44b1-b74c-ddf6c894c46e-kube-api-access-lkdq5" (OuterVolumeSpecName: "kube-api-access-lkdq5") pod "dc238dab-6546-44b1-b74c-ddf6c894c46e" (UID: "dc238dab-6546-44b1-b74c-ddf6c894c46e"). InnerVolumeSpecName "kube-api-access-lkdq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.624405 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc238dab-6546-44b1-b74c-ddf6c894c46e" (UID: "dc238dab-6546-44b1-b74c-ddf6c894c46e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.712302 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc238dab-6546-44b1-b74c-ddf6c894c46e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.712346 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkdq5\" (UniqueName: \"kubernetes.io/projected/dc238dab-6546-44b1-b74c-ddf6c894c46e-kube-api-access-lkdq5\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.972843 4863 generic.go:334] "Generic (PLEG): container finished" podID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerID="c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae" exitCode=0 Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.972931 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t76ls" event={"ID":"dc238dab-6546-44b1-b74c-ddf6c894c46e","Type":"ContainerDied","Data":"c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae"} Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.973083 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t76ls" event={"ID":"dc238dab-6546-44b1-b74c-ddf6c894c46e","Type":"ContainerDied","Data":"83e0128a5baa5584faab44607647456784d2ba19c2904722cf20cb6af82d6243"} Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.973106 4863 scope.go:117] "RemoveContainer" containerID="c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae" Dec 05 08:19:00 crc kubenswrapper[4863]: I1205 08:19:00.972955 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t76ls" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.000000 4863 scope.go:117] "RemoveContainer" containerID="acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.028079 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t76ls"] Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.036048 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-t76ls"] Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.038948 4863 scope.go:117] "RemoveContainer" containerID="54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.047852 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b6d5d985c-2q79b" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.86:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.86:8080: connect: connection refused" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.048008 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.086276 4863 scope.go:117] "RemoveContainer" containerID="c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae" Dec 05 08:19:01 crc kubenswrapper[4863]: E1205 08:19:01.086836 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae\": container with ID starting with c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae not found: ID does not exist" containerID="c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.086870 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae"} err="failed to get container status \"c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae\": rpc error: code = NotFound desc = could not find container \"c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae\": container with ID starting with c5c7ef3efadf17affe9491a14a371d6210dfe410d9e74c199e80931b31b05eae not found: ID does not exist" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.086912 4863 scope.go:117] "RemoveContainer" containerID="acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5" Dec 05 08:19:01 crc kubenswrapper[4863]: E1205 08:19:01.087278 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5\": container with ID starting with acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5 not found: ID does not exist" containerID="acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.087331 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5"} err="failed to get container status 
\"acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5\": rpc error: code = NotFound desc = could not find container \"acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5\": container with ID starting with acdac8b58e1ad879fbd46c4f0e1256e90bbc0e177ec5d5ec539183c42a5504a5 not found: ID does not exist" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.087364 4863 scope.go:117] "RemoveContainer" containerID="54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e" Dec 05 08:19:01 crc kubenswrapper[4863]: E1205 08:19:01.087848 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e\": container with ID starting with 54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e not found: ID does not exist" containerID="54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e" Dec 05 08:19:01 crc kubenswrapper[4863]: I1205 08:19:01.087886 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e"} err="failed to get container status \"54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e\": rpc error: code = NotFound desc = could not find container \"54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e\": container with ID starting with 54372c4d7925c75c332d27cb7e95ca543a48cda39bd689ffb7689df5fc3da40e not found: ID does not exist" Dec 05 08:19:02 crc kubenswrapper[4863]: I1205 08:19:02.620372 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" path="/var/lib/kubelet/pods/dc238dab-6546-44b1-b74c-ddf6c894c46e/volumes" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.020875 4863 generic.go:334] "Generic (PLEG): container finished" podID="391ee1e8-554d-4025-af59-dd0411eb7216" containerID="6f71043dac5ebbdba45bb57b521ab6a3c7869c12c368df8c5b80d3f614ba6efc" exitCode=137 Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.021001 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6d5d985c-2q79b" event={"ID":"391ee1e8-554d-4025-af59-dd0411eb7216","Type":"ContainerDied","Data":"6f71043dac5ebbdba45bb57b521ab6a3c7869c12c368df8c5b80d3f614ba6efc"} Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.165599 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.344914 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/391ee1e8-554d-4025-af59-dd0411eb7216-horizon-secret-key\") pod \"391ee1e8-554d-4025-af59-dd0411eb7216\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.345208 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq54m\" (UniqueName: \"kubernetes.io/projected/391ee1e8-554d-4025-af59-dd0411eb7216-kube-api-access-tq54m\") pod \"391ee1e8-554d-4025-af59-dd0411eb7216\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.345253 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391ee1e8-554d-4025-af59-dd0411eb7216-logs\") pod \"391ee1e8-554d-4025-af59-dd0411eb7216\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.345354 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-scripts\") pod \"391ee1e8-554d-4025-af59-dd0411eb7216\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.345390 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-config-data\") pod \"391ee1e8-554d-4025-af59-dd0411eb7216\" (UID: \"391ee1e8-554d-4025-af59-dd0411eb7216\") " Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.347181 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391ee1e8-554d-4025-af59-dd0411eb7216-logs" (OuterVolumeSpecName: "logs") pod "391ee1e8-554d-4025-af59-dd0411eb7216" (UID: "391ee1e8-554d-4025-af59-dd0411eb7216"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.347803 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391ee1e8-554d-4025-af59-dd0411eb7216-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.353481 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/391ee1e8-554d-4025-af59-dd0411eb7216-kube-api-access-tq54m" (OuterVolumeSpecName: "kube-api-access-tq54m") pod "391ee1e8-554d-4025-af59-dd0411eb7216" (UID: "391ee1e8-554d-4025-af59-dd0411eb7216"). InnerVolumeSpecName "kube-api-access-tq54m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.353646 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391ee1e8-554d-4025-af59-dd0411eb7216-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "391ee1e8-554d-4025-af59-dd0411eb7216" (UID: "391ee1e8-554d-4025-af59-dd0411eb7216"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.371101 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-config-data" (OuterVolumeSpecName: "config-data") pod "391ee1e8-554d-4025-af59-dd0411eb7216" (UID: "391ee1e8-554d-4025-af59-dd0411eb7216"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.374275 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-scripts" (OuterVolumeSpecName: "scripts") pod "391ee1e8-554d-4025-af59-dd0411eb7216" (UID: "391ee1e8-554d-4025-af59-dd0411eb7216"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.449674 4863 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/391ee1e8-554d-4025-af59-dd0411eb7216-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.449713 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq54m\" (UniqueName: \"kubernetes.io/projected/391ee1e8-554d-4025-af59-dd0411eb7216-kube-api-access-tq54m\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.449725 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:06 crc kubenswrapper[4863]: I1205 08:19:06.449741 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/391ee1e8-554d-4025-af59-dd0411eb7216-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:07 crc kubenswrapper[4863]: I1205 08:19:07.038952 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6d5d985c-2q79b" event={"ID":"391ee1e8-554d-4025-af59-dd0411eb7216","Type":"ContainerDied","Data":"19bb88f7a24737c863b5c11346bee650e6da693472b077f48b8f5a5b2366ea6a"} Dec 05 08:19:07 crc kubenswrapper[4863]: I1205 08:19:07.039276 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b6d5d985c-2q79b" Dec 05 08:19:07 crc kubenswrapper[4863]: I1205 08:19:07.039641 4863 scope.go:117] "RemoveContainer" containerID="2976ad557659eeb945451f7e5b5be07e3ed642bf495a821731461ea3ff64f5c1" Dec 05 08:19:07 crc kubenswrapper[4863]: I1205 08:19:07.074522 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b6d5d985c-2q79b"] Dec 05 08:19:07 crc kubenswrapper[4863]: I1205 08:19:07.084544 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b6d5d985c-2q79b"] Dec 05 08:19:07 crc kubenswrapper[4863]: I1205 08:19:07.244784 4863 scope.go:117] "RemoveContainer" containerID="6f71043dac5ebbdba45bb57b521ab6a3c7869c12c368df8c5b80d3f614ba6efc" Dec 05 08:19:08 crc kubenswrapper[4863]: I1205 08:19:08.612990 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" path="/var/lib/kubelet/pods/391ee1e8-554d-4025-af59-dd0411eb7216/volumes" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.946029 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d9sg4"] Dec 05 08:19:24 crc kubenswrapper[4863]: E1205 08:19:24.947680 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="extract-content" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.947711 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="extract-content" Dec 05 08:19:24 crc kubenswrapper[4863]: E1205 08:19:24.947764 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="extract-utilities" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.947782 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="extract-utilities" Dec 05 08:19:24 crc kubenswrapper[4863]: E1205 08:19:24.947818 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.947836 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" Dec 05 08:19:24 crc kubenswrapper[4863]: E1205 08:19:24.947872 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon-log" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.947888 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon-log" Dec 05 08:19:24 crc kubenswrapper[4863]: E1205 08:19:24.947950 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="registry-server" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.947966 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="registry-server" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.948421 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc238dab-6546-44b1-b74c-ddf6c894c46e" containerName="registry-server" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.948458 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon" Dec 05 08:19:24 crc 
kubenswrapper[4863]: I1205 08:19:24.948541 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="391ee1e8-554d-4025-af59-dd0411eb7216" containerName="horizon-log" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.957277 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:24 crc kubenswrapper[4863]: I1205 08:19:24.971592 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d9sg4"] Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.008642 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-catalog-content\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.008851 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-utilities\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.008893 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvt66\" (UniqueName: \"kubernetes.io/projected/48bf6874-8dee-4478-87ad-28bff539b32a-kube-api-access-fvt66\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.110477 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-utilities\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.110521 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvt66\" (UniqueName: \"kubernetes.io/projected/48bf6874-8dee-4478-87ad-28bff539b32a-kube-api-access-fvt66\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.110601 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-catalog-content\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.111045 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-catalog-content\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.111131 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-utilities\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.133858 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvt66\" (UniqueName: \"kubernetes.io/projected/48bf6874-8dee-4478-87ad-28bff539b32a-kube-api-access-fvt66\") pod \"redhat-operators-d9sg4\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.297071 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:25 crc kubenswrapper[4863]: I1205 08:19:25.811309 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d9sg4"] Dec 05 08:19:26 crc kubenswrapper[4863]: I1205 08:19:26.210757 4863 generic.go:334] "Generic (PLEG): container finished" podID="48bf6874-8dee-4478-87ad-28bff539b32a" containerID="97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281" exitCode=0 Dec 05 08:19:26 crc kubenswrapper[4863]: I1205 08:19:26.211058 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d9sg4" event={"ID":"48bf6874-8dee-4478-87ad-28bff539b32a","Type":"ContainerDied","Data":"97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281"} Dec 05 08:19:26 crc kubenswrapper[4863]: I1205 08:19:26.211088 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d9sg4" event={"ID":"48bf6874-8dee-4478-87ad-28bff539b32a","Type":"ContainerStarted","Data":"78e872609270c81f1e4ce0f389b820b442324a07e39257eb08aeabbbe16d8076"} Dec 05 08:19:28 crc kubenswrapper[4863]: I1205 08:19:28.228729 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d9sg4" event={"ID":"48bf6874-8dee-4478-87ad-28bff539b32a","Type":"ContainerStarted","Data":"2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231"} Dec 05 08:19:30 crc kubenswrapper[4863]: I1205 08:19:30.250673 4863 generic.go:334] "Generic (PLEG): container finished" podID="48bf6874-8dee-4478-87ad-28bff539b32a" containerID="2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231" exitCode=0 Dec 05 08:19:30 crc kubenswrapper[4863]: I1205 08:19:30.250749 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d9sg4" event={"ID":"48bf6874-8dee-4478-87ad-28bff539b32a","Type":"ContainerDied","Data":"2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231"} Dec 05 08:19:31 crc kubenswrapper[4863]: I1205 08:19:31.269116 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d9sg4" event={"ID":"48bf6874-8dee-4478-87ad-28bff539b32a","Type":"ContainerStarted","Data":"3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33"} Dec 05 08:19:31 crc kubenswrapper[4863]: I1205 08:19:31.311002 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d9sg4" podStartSLOduration=2.862918837 podStartE2EDuration="7.310943434s" podCreationTimestamp="2025-12-05 08:19:24 +0000 UTC" firstStartedPulling="2025-12-05 08:19:26.212533496 +0000 UTC m=+5593.938530546" lastFinishedPulling="2025-12-05 08:19:30.660558103 +0000 UTC m=+5598.386555143" 
observedRunningTime="2025-12-05 08:19:31.298123164 +0000 UTC m=+5599.024120284" watchObservedRunningTime="2025-12-05 08:19:31.310943434 +0000 UTC m=+5599.036940494" Dec 05 08:19:35 crc kubenswrapper[4863]: I1205 08:19:35.297797 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:35 crc kubenswrapper[4863]: I1205 08:19:35.298158 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:36 crc kubenswrapper[4863]: I1205 08:19:36.375992 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-d9sg4" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="registry-server" probeResult="failure" output=< Dec 05 08:19:36 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 08:19:36 crc kubenswrapper[4863]: > Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.238632 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7c8786bdb9-lfhbp"] Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.240727 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.248449 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c8786bdb9-lfhbp"] Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.274655 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-scripts\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.274758 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmhxw\" (UniqueName: \"kubernetes.io/projected/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-kube-api-access-xmhxw\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.274789 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-horizon-secret-key\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.274817 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-config-data\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.274844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-logs\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.376669 4863 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-xmhxw\" (UniqueName: \"kubernetes.io/projected/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-kube-api-access-xmhxw\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.376739 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-horizon-secret-key\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.376771 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-config-data\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.376802 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-logs\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.376906 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-scripts\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.377427 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-logs\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.377947 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-scripts\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.378114 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-config-data\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.383842 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-horizon-secret-key\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: \"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.395893 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmhxw\" (UniqueName: \"kubernetes.io/projected/219bf904-4af3-4f1e-a1e9-97a72fcf6c0f-kube-api-access-xmhxw\") pod \"horizon-7c8786bdb9-lfhbp\" (UID: 
\"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f\") " pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:43 crc kubenswrapper[4863]: I1205 08:19:43.557543 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.021020 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c8786bdb9-lfhbp"] Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.398745 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8786bdb9-lfhbp" event={"ID":"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f","Type":"ContainerStarted","Data":"df2bd1c4a11d9293c4ea620e05efb2d63d3ba8d70ffaf33fde2a1faf81e4c200"} Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.399012 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8786bdb9-lfhbp" event={"ID":"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f","Type":"ContainerStarted","Data":"e1e9dfa90ec92c7ef79c9b8ab4fee5cbbf12988f1e2d1f53fd0763d47efd041b"} Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.399027 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8786bdb9-lfhbp" event={"ID":"219bf904-4af3-4f1e-a1e9-97a72fcf6c0f","Type":"ContainerStarted","Data":"be22699815bfaa02e0cfdeca0f25d075446a6c1a4e7cd7eb4a46f014dfbcb918"} Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.421353 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7c8786bdb9-lfhbp" podStartSLOduration=1.421332999 podStartE2EDuration="1.421332999s" podCreationTimestamp="2025-12-05 08:19:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:19:44.415828085 +0000 UTC m=+5612.141825145" watchObservedRunningTime="2025-12-05 08:19:44.421332999 +0000 UTC m=+5612.147330039" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.443602 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-lwzdb"] Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.445112 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.462805 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-lwzdb"] Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.500252 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb29w\" (UniqueName: \"kubernetes.io/projected/487987c6-44ef-446d-805f-2a6e57dcc81b-kube-api-access-rb29w\") pod \"heat-db-create-lwzdb\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.500623 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/487987c6-44ef-446d-805f-2a6e57dcc81b-operator-scripts\") pod \"heat-db-create-lwzdb\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.534543 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-0a24-account-create-update-8kkx2"] Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.535711 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.537820 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.572489 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-0a24-account-create-update-8kkx2"] Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.601966 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/487987c6-44ef-446d-805f-2a6e57dcc81b-operator-scripts\") pod \"heat-db-create-lwzdb\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.602080 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb29w\" (UniqueName: \"kubernetes.io/projected/487987c6-44ef-446d-805f-2a6e57dcc81b-kube-api-access-rb29w\") pod \"heat-db-create-lwzdb\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.602196 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvr66\" (UniqueName: \"kubernetes.io/projected/e259db5d-318c-484f-b907-3ff3f053a96d-kube-api-access-dvr66\") pod \"heat-0a24-account-create-update-8kkx2\" (UID: \"e259db5d-318c-484f-b907-3ff3f053a96d\") " pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.602247 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e259db5d-318c-484f-b907-3ff3f053a96d-operator-scripts\") pod \"heat-0a24-account-create-update-8kkx2\" (UID: \"e259db5d-318c-484f-b907-3ff3f053a96d\") " pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.602765 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/487987c6-44ef-446d-805f-2a6e57dcc81b-operator-scripts\") pod \"heat-db-create-lwzdb\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.626686 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb29w\" (UniqueName: \"kubernetes.io/projected/487987c6-44ef-446d-805f-2a6e57dcc81b-kube-api-access-rb29w\") pod \"heat-db-create-lwzdb\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.703772 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e259db5d-318c-484f-b907-3ff3f053a96d-operator-scripts\") pod \"heat-0a24-account-create-update-8kkx2\" (UID: \"e259db5d-318c-484f-b907-3ff3f053a96d\") " pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.703996 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvr66\" (UniqueName: \"kubernetes.io/projected/e259db5d-318c-484f-b907-3ff3f053a96d-kube-api-access-dvr66\") pod \"heat-0a24-account-create-update-8kkx2\" (UID: 
\"e259db5d-318c-484f-b907-3ff3f053a96d\") " pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.704856 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e259db5d-318c-484f-b907-3ff3f053a96d-operator-scripts\") pod \"heat-0a24-account-create-update-8kkx2\" (UID: \"e259db5d-318c-484f-b907-3ff3f053a96d\") " pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.720496 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvr66\" (UniqueName: \"kubernetes.io/projected/e259db5d-318c-484f-b907-3ff3f053a96d-kube-api-access-dvr66\") pod \"heat-0a24-account-create-update-8kkx2\" (UID: \"e259db5d-318c-484f-b907-3ff3f053a96d\") " pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.766708 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:44 crc kubenswrapper[4863]: I1205 08:19:44.864119 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:45 crc kubenswrapper[4863]: I1205 08:19:45.218984 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-lwzdb"] Dec 05 08:19:45 crc kubenswrapper[4863]: I1205 08:19:45.361101 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:45 crc kubenswrapper[4863]: W1205 08:19:45.361895 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode259db5d_318c_484f_b907_3ff3f053a96d.slice/crio-90cf256b90f5ca11fefec6d947280289c31ea1d7fed4df6cef7b86b09ed2fb21 WatchSource:0}: Error finding container 90cf256b90f5ca11fefec6d947280289c31ea1d7fed4df6cef7b86b09ed2fb21: Status 404 returned error can't find the container with id 90cf256b90f5ca11fefec6d947280289c31ea1d7fed4df6cef7b86b09ed2fb21 Dec 05 08:19:45 crc kubenswrapper[4863]: I1205 08:19:45.367700 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-0a24-account-create-update-8kkx2"] Dec 05 08:19:45 crc kubenswrapper[4863]: I1205 08:19:45.409128 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-0a24-account-create-update-8kkx2" event={"ID":"e259db5d-318c-484f-b907-3ff3f053a96d","Type":"ContainerStarted","Data":"90cf256b90f5ca11fefec6d947280289c31ea1d7fed4df6cef7b86b09ed2fb21"} Dec 05 08:19:45 crc kubenswrapper[4863]: I1205 08:19:45.410591 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-lwzdb" event={"ID":"487987c6-44ef-446d-805f-2a6e57dcc81b","Type":"ContainerStarted","Data":"76b0864ce2ebcee7bca068b3fab8d1e226557a5fc40ecc7d0a9f792bd47b3419"} Dec 05 08:19:45 crc kubenswrapper[4863]: I1205 08:19:45.461251 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:45 crc kubenswrapper[4863]: I1205 08:19:45.597780 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d9sg4"] Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.420773 4863 generic.go:334] "Generic (PLEG): container finished" podID="e259db5d-318c-484f-b907-3ff3f053a96d" 
containerID="173a917e746569ccd1a001fb131ba542525262eb3cb4c4b6f85d8d9291c914d0" exitCode=0 Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.420827 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-0a24-account-create-update-8kkx2" event={"ID":"e259db5d-318c-484f-b907-3ff3f053a96d","Type":"ContainerDied","Data":"173a917e746569ccd1a001fb131ba542525262eb3cb4c4b6f85d8d9291c914d0"} Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.424085 4863 generic.go:334] "Generic (PLEG): container finished" podID="487987c6-44ef-446d-805f-2a6e57dcc81b" containerID="2b3bba5fefc8e577d836cc28b0c136d8064d3f15deb260e9f3ec29293d096bb7" exitCode=0 Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.424526 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d9sg4" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="registry-server" containerID="cri-o://3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33" gracePeriod=2 Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.424780 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-lwzdb" event={"ID":"487987c6-44ef-446d-805f-2a6e57dcc81b","Type":"ContainerDied","Data":"2b3bba5fefc8e577d836cc28b0c136d8064d3f15deb260e9f3ec29293d096bb7"} Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.902765 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.948893 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-utilities\") pod \"48bf6874-8dee-4478-87ad-28bff539b32a\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.949033 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-catalog-content\") pod \"48bf6874-8dee-4478-87ad-28bff539b32a\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.949162 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvt66\" (UniqueName: \"kubernetes.io/projected/48bf6874-8dee-4478-87ad-28bff539b32a-kube-api-access-fvt66\") pod \"48bf6874-8dee-4478-87ad-28bff539b32a\" (UID: \"48bf6874-8dee-4478-87ad-28bff539b32a\") " Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.950525 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-utilities" (OuterVolumeSpecName: "utilities") pod "48bf6874-8dee-4478-87ad-28bff539b32a" (UID: "48bf6874-8dee-4478-87ad-28bff539b32a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:19:46 crc kubenswrapper[4863]: I1205 08:19:46.967402 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48bf6874-8dee-4478-87ad-28bff539b32a-kube-api-access-fvt66" (OuterVolumeSpecName: "kube-api-access-fvt66") pod "48bf6874-8dee-4478-87ad-28bff539b32a" (UID: "48bf6874-8dee-4478-87ad-28bff539b32a"). InnerVolumeSpecName "kube-api-access-fvt66". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.050971 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.051027 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvt66\" (UniqueName: \"kubernetes.io/projected/48bf6874-8dee-4478-87ad-28bff539b32a-kube-api-access-fvt66\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.052965 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48bf6874-8dee-4478-87ad-28bff539b32a" (UID: "48bf6874-8dee-4478-87ad-28bff539b32a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.153898 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bf6874-8dee-4478-87ad-28bff539b32a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.440619 4863 generic.go:334] "Generic (PLEG): container finished" podID="48bf6874-8dee-4478-87ad-28bff539b32a" containerID="3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33" exitCode=0 Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.440698 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d9sg4" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.440743 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d9sg4" event={"ID":"48bf6874-8dee-4478-87ad-28bff539b32a","Type":"ContainerDied","Data":"3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33"} Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.441973 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d9sg4" event={"ID":"48bf6874-8dee-4478-87ad-28bff539b32a","Type":"ContainerDied","Data":"78e872609270c81f1e4ce0f389b820b442324a07e39257eb08aeabbbe16d8076"} Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.442025 4863 scope.go:117] "RemoveContainer" containerID="3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.470819 4863 scope.go:117] "RemoveContainer" containerID="2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.494730 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d9sg4"] Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.503404 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d9sg4"] Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.519983 4863 scope.go:117] "RemoveContainer" containerID="97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.561401 4863 scope.go:117] "RemoveContainer" containerID="3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33" Dec 05 08:19:47 crc kubenswrapper[4863]: E1205 08:19:47.562131 4863 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33\": container with ID starting with 3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33 not found: ID does not exist" containerID="3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.562168 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33"} err="failed to get container status \"3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33\": rpc error: code = NotFound desc = could not find container \"3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33\": container with ID starting with 3a680e2e8734e1f42dd58e0bf32d7b2651219ffea25a892682b7cd8432df5b33 not found: ID does not exist" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.562190 4863 scope.go:117] "RemoveContainer" containerID="2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231" Dec 05 08:19:47 crc kubenswrapper[4863]: E1205 08:19:47.564437 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231\": container with ID starting with 2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231 not found: ID does not exist" containerID="2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.564512 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231"} err="failed to get container status \"2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231\": rpc error: code = NotFound desc = could not find container \"2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231\": container with ID starting with 2bef5c4c47b3fe0d9f6770baace7d3e45b89398766c31b310c31ed0e7b227231 not found: ID does not exist" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.564540 4863 scope.go:117] "RemoveContainer" containerID="97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281" Dec 05 08:19:47 crc kubenswrapper[4863]: E1205 08:19:47.565091 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281\": container with ID starting with 97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281 not found: ID does not exist" containerID="97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.565140 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281"} err="failed to get container status \"97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281\": rpc error: code = NotFound desc = could not find container \"97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281\": container with ID starting with 97e8016af04bdc95dfe0dd50a85be137ac1c7af89773e0bba34be0627577e281 not found: ID does not exist" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.863600 4863 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.874845 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.969889 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvr66\" (UniqueName: \"kubernetes.io/projected/e259db5d-318c-484f-b907-3ff3f053a96d-kube-api-access-dvr66\") pod \"e259db5d-318c-484f-b907-3ff3f053a96d\" (UID: \"e259db5d-318c-484f-b907-3ff3f053a96d\") " Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.969939 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/487987c6-44ef-446d-805f-2a6e57dcc81b-operator-scripts\") pod \"487987c6-44ef-446d-805f-2a6e57dcc81b\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.969985 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb29w\" (UniqueName: \"kubernetes.io/projected/487987c6-44ef-446d-805f-2a6e57dcc81b-kube-api-access-rb29w\") pod \"487987c6-44ef-446d-805f-2a6e57dcc81b\" (UID: \"487987c6-44ef-446d-805f-2a6e57dcc81b\") " Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.970073 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e259db5d-318c-484f-b907-3ff3f053a96d-operator-scripts\") pod \"e259db5d-318c-484f-b907-3ff3f053a96d\" (UID: \"e259db5d-318c-484f-b907-3ff3f053a96d\") " Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.970670 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e259db5d-318c-484f-b907-3ff3f053a96d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e259db5d-318c-484f-b907-3ff3f053a96d" (UID: "e259db5d-318c-484f-b907-3ff3f053a96d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.970882 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/487987c6-44ef-446d-805f-2a6e57dcc81b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "487987c6-44ef-446d-805f-2a6e57dcc81b" (UID: "487987c6-44ef-446d-805f-2a6e57dcc81b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.974226 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/487987c6-44ef-446d-805f-2a6e57dcc81b-kube-api-access-rb29w" (OuterVolumeSpecName: "kube-api-access-rb29w") pod "487987c6-44ef-446d-805f-2a6e57dcc81b" (UID: "487987c6-44ef-446d-805f-2a6e57dcc81b"). InnerVolumeSpecName "kube-api-access-rb29w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:19:47 crc kubenswrapper[4863]: I1205 08:19:47.975832 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e259db5d-318c-484f-b907-3ff3f053a96d-kube-api-access-dvr66" (OuterVolumeSpecName: "kube-api-access-dvr66") pod "e259db5d-318c-484f-b907-3ff3f053a96d" (UID: "e259db5d-318c-484f-b907-3ff3f053a96d"). InnerVolumeSpecName "kube-api-access-dvr66". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.067940 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-pp6zp"] Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.076265 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvr66\" (UniqueName: \"kubernetes.io/projected/e259db5d-318c-484f-b907-3ff3f053a96d-kube-api-access-dvr66\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.076312 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/487987c6-44ef-446d-805f-2a6e57dcc81b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.076323 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb29w\" (UniqueName: \"kubernetes.io/projected/487987c6-44ef-446d-805f-2a6e57dcc81b-kube-api-access-rb29w\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.076333 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e259db5d-318c-484f-b907-3ff3f053a96d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.082098 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-1a4b-account-create-update-27g2v"] Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.090894 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-1a4b-account-create-update-27g2v"] Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.100571 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-pp6zp"] Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.455976 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-0a24-account-create-update-8kkx2" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.455980 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-0a24-account-create-update-8kkx2" event={"ID":"e259db5d-318c-484f-b907-3ff3f053a96d","Type":"ContainerDied","Data":"90cf256b90f5ca11fefec6d947280289c31ea1d7fed4df6cef7b86b09ed2fb21"} Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.456166 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90cf256b90f5ca11fefec6d947280289c31ea1d7fed4df6cef7b86b09ed2fb21" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.459409 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-lwzdb" event={"ID":"487987c6-44ef-446d-805f-2a6e57dcc81b","Type":"ContainerDied","Data":"76b0864ce2ebcee7bca068b3fab8d1e226557a5fc40ecc7d0a9f792bd47b3419"} Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.459438 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76b0864ce2ebcee7bca068b3fab8d1e226557a5fc40ecc7d0a9f792bd47b3419" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.459510 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-lwzdb" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.613036 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a3536c-978c-4eb9-8493-66d82c888911" path="/var/lib/kubelet/pods/06a3536c-978c-4eb9-8493-66d82c888911/volumes" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.613819 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" path="/var/lib/kubelet/pods/48bf6874-8dee-4478-87ad-28bff539b32a/volumes" Dec 05 08:19:48 crc kubenswrapper[4863]: I1205 08:19:48.614708 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2959946-59e3-4a24-b94c-c0435cf60fca" path="/var/lib/kubelet/pods/a2959946-59e3-4a24-b94c-c0435cf60fca/volumes" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.694220 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-lht28"] Dec 05 08:19:49 crc kubenswrapper[4863]: E1205 08:19:49.695174 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="extract-utilities" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695193 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="extract-utilities" Dec 05 08:19:49 crc kubenswrapper[4863]: E1205 08:19:49.695229 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e259db5d-318c-484f-b907-3ff3f053a96d" containerName="mariadb-account-create-update" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695241 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e259db5d-318c-484f-b907-3ff3f053a96d" containerName="mariadb-account-create-update" Dec 05 08:19:49 crc kubenswrapper[4863]: E1205 08:19:49.695254 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="registry-server" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695262 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="registry-server" Dec 05 08:19:49 crc kubenswrapper[4863]: E1205 08:19:49.695280 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="487987c6-44ef-446d-805f-2a6e57dcc81b" containerName="mariadb-database-create" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695288 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="487987c6-44ef-446d-805f-2a6e57dcc81b" containerName="mariadb-database-create" Dec 05 08:19:49 crc kubenswrapper[4863]: E1205 08:19:49.695305 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="extract-content" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695315 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="extract-content" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695640 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="48bf6874-8dee-4478-87ad-28bff539b32a" containerName="registry-server" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695681 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="487987c6-44ef-446d-805f-2a6e57dcc81b" containerName="mariadb-database-create" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.695694 4863 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e259db5d-318c-484f-b907-3ff3f053a96d" containerName="mariadb-account-create-update" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.696709 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.699970 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-6jn99" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.700211 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.705062 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-lht28"] Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.715919 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-combined-ca-bundle\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.716059 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-config-data\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.716104 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c45cw\" (UniqueName: \"kubernetes.io/projected/21009658-c5e6-4b14-9328-47545509992e-kube-api-access-c45cw\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.818080 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-combined-ca-bundle\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.818211 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-config-data\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.818303 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c45cw\" (UniqueName: \"kubernetes.io/projected/21009658-c5e6-4b14-9328-47545509992e-kube-api-access-c45cw\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.823214 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-combined-ca-bundle\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.823674 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-config-data\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:49 crc kubenswrapper[4863]: I1205 08:19:49.833606 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c45cw\" (UniqueName: \"kubernetes.io/projected/21009658-c5e6-4b14-9328-47545509992e-kube-api-access-c45cw\") pod \"heat-db-sync-lht28\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " pod="openstack/heat-db-sync-lht28" Dec 05 08:19:50 crc kubenswrapper[4863]: I1205 08:19:50.032095 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-lht28" Dec 05 08:19:50 crc kubenswrapper[4863]: I1205 08:19:50.516555 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-lht28"] Dec 05 08:19:51 crc kubenswrapper[4863]: I1205 08:19:51.493459 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-lht28" event={"ID":"21009658-c5e6-4b14-9328-47545509992e","Type":"ContainerStarted","Data":"97891e6954c3a6fb6ec4bdfb92d079ddcbe9eb3847b9227d47c674d8589f1f75"} Dec 05 08:19:53 crc kubenswrapper[4863]: I1205 08:19:53.558066 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:53 crc kubenswrapper[4863]: I1205 08:19:53.558377 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:19:58 crc kubenswrapper[4863]: I1205 08:19:58.574556 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-lht28" event={"ID":"21009658-c5e6-4b14-9328-47545509992e","Type":"ContainerStarted","Data":"dda52595d5274770feb8eb8265baf75afbb9fbae229d59c173604766310f67e8"} Dec 05 08:19:58 crc kubenswrapper[4863]: I1205 08:19:58.602952 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-lht28" podStartSLOduration=2.352227452 podStartE2EDuration="9.602935005s" podCreationTimestamp="2025-12-05 08:19:49 +0000 UTC" firstStartedPulling="2025-12-05 08:19:50.51759542 +0000 UTC m=+5618.243592460" lastFinishedPulling="2025-12-05 08:19:57.768302973 +0000 UTC m=+5625.494300013" observedRunningTime="2025-12-05 08:19:58.595439033 +0000 UTC m=+5626.321436113" watchObservedRunningTime="2025-12-05 08:19:58.602935005 +0000 UTC m=+5626.328932045" Dec 05 08:19:59 crc kubenswrapper[4863]: I1205 08:19:59.033050 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-85gqg"] Dec 05 08:19:59 crc kubenswrapper[4863]: I1205 08:19:59.042840 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-85gqg"] Dec 05 08:20:00 crc kubenswrapper[4863]: I1205 08:20:00.600507 4863 generic.go:334] "Generic (PLEG): container finished" podID="21009658-c5e6-4b14-9328-47545509992e" containerID="dda52595d5274770feb8eb8265baf75afbb9fbae229d59c173604766310f67e8" exitCode=0 Dec 05 08:20:00 crc kubenswrapper[4863]: I1205 08:20:00.600676 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-lht28" event={"ID":"21009658-c5e6-4b14-9328-47545509992e","Type":"ContainerDied","Data":"dda52595d5274770feb8eb8265baf75afbb9fbae229d59c173604766310f67e8"} Dec 05 08:20:00 crc kubenswrapper[4863]: I1205 08:20:00.629721 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="492ce819-1631-46d6-aff0-8d9e135116ef" 
path="/var/lib/kubelet/pods/492ce819-1631-46d6-aff0-8d9e135116ef/volumes" Dec 05 08:20:01 crc kubenswrapper[4863]: I1205 08:20:01.978971 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-lht28" Dec 05 08:20:01 crc kubenswrapper[4863]: I1205 08:20:01.995931 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c45cw\" (UniqueName: \"kubernetes.io/projected/21009658-c5e6-4b14-9328-47545509992e-kube-api-access-c45cw\") pod \"21009658-c5e6-4b14-9328-47545509992e\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " Dec 05 08:20:01 crc kubenswrapper[4863]: I1205 08:20:01.996026 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-combined-ca-bundle\") pod \"21009658-c5e6-4b14-9328-47545509992e\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " Dec 05 08:20:01 crc kubenswrapper[4863]: I1205 08:20:01.996460 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-config-data\") pod \"21009658-c5e6-4b14-9328-47545509992e\" (UID: \"21009658-c5e6-4b14-9328-47545509992e\") " Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.003184 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21009658-c5e6-4b14-9328-47545509992e-kube-api-access-c45cw" (OuterVolumeSpecName: "kube-api-access-c45cw") pod "21009658-c5e6-4b14-9328-47545509992e" (UID: "21009658-c5e6-4b14-9328-47545509992e"). InnerVolumeSpecName "kube-api-access-c45cw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.022155 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21009658-c5e6-4b14-9328-47545509992e" (UID: "21009658-c5e6-4b14-9328-47545509992e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.063115 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-config-data" (OuterVolumeSpecName: "config-data") pod "21009658-c5e6-4b14-9328-47545509992e" (UID: "21009658-c5e6-4b14-9328-47545509992e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.099462 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.099511 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c45cw\" (UniqueName: \"kubernetes.io/projected/21009658-c5e6-4b14-9328-47545509992e-kube-api-access-c45cw\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.099525 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21009658-c5e6-4b14-9328-47545509992e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.620492 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-lht28" event={"ID":"21009658-c5e6-4b14-9328-47545509992e","Type":"ContainerDied","Data":"97891e6954c3a6fb6ec4bdfb92d079ddcbe9eb3847b9227d47c674d8589f1f75"} Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.620541 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97891e6954c3a6fb6ec4bdfb92d079ddcbe9eb3847b9227d47c674d8589f1f75" Dec 05 08:20:02 crc kubenswrapper[4863]: I1205 08:20:02.620547 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-lht28" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.651920 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-7cdbc8777d-ww6sx"] Dec 05 08:20:03 crc kubenswrapper[4863]: E1205 08:20:03.653127 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21009658-c5e6-4b14-9328-47545509992e" containerName="heat-db-sync" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.653148 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="21009658-c5e6-4b14-9328-47545509992e" containerName="heat-db-sync" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.653357 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="21009658-c5e6-4b14-9328-47545509992e" containerName="heat-db-sync" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.654314 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.664062 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-6jn99" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.664337 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.679288 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7cdbc8777d-ww6sx"] Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.681113 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.730535 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-config-data\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.730604 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-config-data-custom\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.730767 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-combined-ca-bundle\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.730790 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw8cs\" (UniqueName: \"kubernetes.io/projected/76c4aa64-a697-4589-8cf1-cfe55095bf12-kube-api-access-hw8cs\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.756283 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-598f986844-4pl5l"] Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.757639 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.760379 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.775479 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-598f986844-4pl5l"] Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.789659 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6f444c5f8b-2b4vc"] Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.791591 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.793756 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.810234 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6f444c5f8b-2b4vc"] Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833652 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-config-data\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833727 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-config-data-custom\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833761 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-config-data-custom\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833805 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-combined-ca-bundle\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833853 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-combined-ca-bundle\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833889 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-config-data\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833937 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c724q\" (UniqueName: \"kubernetes.io/projected/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-kube-api-access-c724q\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.833976 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-config-data\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: 
\"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.834002 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-config-data-custom\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.834045 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gksdp\" (UniqueName: \"kubernetes.io/projected/3d698629-4fde-46f2-a374-11418a9c99a6-kube-api-access-gksdp\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.834067 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-combined-ca-bundle\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.834086 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw8cs\" (UniqueName: \"kubernetes.io/projected/76c4aa64-a697-4589-8cf1-cfe55095bf12-kube-api-access-hw8cs\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.842766 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-combined-ca-bundle\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.842998 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-config-data\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.845757 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/76c4aa64-a697-4589-8cf1-cfe55095bf12-config-data-custom\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.853541 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw8cs\" (UniqueName: \"kubernetes.io/projected/76c4aa64-a697-4589-8cf1-cfe55095bf12-kube-api-access-hw8cs\") pod \"heat-engine-7cdbc8777d-ww6sx\" (UID: \"76c4aa64-a697-4589-8cf1-cfe55095bf12\") " pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.935717 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-config-data-custom\") pod \"heat-api-598f986844-4pl5l\" 
(UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.936025 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-combined-ca-bundle\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.936100 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-combined-ca-bundle\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.936632 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-config-data\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.936690 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c724q\" (UniqueName: \"kubernetes.io/projected/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-kube-api-access-c724q\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.936725 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-config-data\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.936745 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-config-data-custom\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.936779 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gksdp\" (UniqueName: \"kubernetes.io/projected/3d698629-4fde-46f2-a374-11418a9c99a6-kube-api-access-gksdp\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.943677 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-combined-ca-bundle\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.944824 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-config-data\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " 
pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.948588 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-combined-ca-bundle\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.951992 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d698629-4fde-46f2-a374-11418a9c99a6-config-data-custom\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.956296 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-config-data-custom\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.960310 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-config-data\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.960510 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gksdp\" (UniqueName: \"kubernetes.io/projected/3d698629-4fde-46f2-a374-11418a9c99a6-kube-api-access-gksdp\") pod \"heat-api-598f986844-4pl5l\" (UID: \"3d698629-4fde-46f2-a374-11418a9c99a6\") " pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:03 crc kubenswrapper[4863]: I1205 08:20:03.963155 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c724q\" (UniqueName: \"kubernetes.io/projected/8a6e1dc6-dc91-4697-a550-5058f0a7fc98-kube-api-access-c724q\") pod \"heat-cfnapi-6f444c5f8b-2b4vc\" (UID: \"8a6e1dc6-dc91-4697-a550-5058f0a7fc98\") " pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:04 crc kubenswrapper[4863]: I1205 08:20:04.004093 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:04 crc kubenswrapper[4863]: I1205 08:20:04.095571 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:04 crc kubenswrapper[4863]: I1205 08:20:04.127426 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:04 crc kubenswrapper[4863]: I1205 08:20:04.633739 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7cdbc8777d-ww6sx"] Dec 05 08:20:04 crc kubenswrapper[4863]: W1205 08:20:04.637777 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76c4aa64_a697_4589_8cf1_cfe55095bf12.slice/crio-933f75c16778508707a6993b42ae2b9bc7ec785940fc58885bfbbcb90b3db433 WatchSource:0}: Error finding container 933f75c16778508707a6993b42ae2b9bc7ec785940fc58885bfbbcb90b3db433: Status 404 returned error can't find the container with id 933f75c16778508707a6993b42ae2b9bc7ec785940fc58885bfbbcb90b3db433 Dec 05 08:20:04 crc kubenswrapper[4863]: I1205 08:20:04.683593 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7cdbc8777d-ww6sx" event={"ID":"76c4aa64-a697-4589-8cf1-cfe55095bf12","Type":"ContainerStarted","Data":"933f75c16778508707a6993b42ae2b9bc7ec785940fc58885bfbbcb90b3db433"} Dec 05 08:20:04 crc kubenswrapper[4863]: I1205 08:20:04.816923 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-598f986844-4pl5l"] Dec 05 08:20:04 crc kubenswrapper[4863]: I1205 08:20:04.837209 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6f444c5f8b-2b4vc"] Dec 05 08:20:05 crc kubenswrapper[4863]: I1205 08:20:05.699222 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7cdbc8777d-ww6sx" event={"ID":"76c4aa64-a697-4589-8cf1-cfe55095bf12","Type":"ContainerStarted","Data":"61ab914cd2da8b18d082ebe68b23721127f3b46e88c0bbb676669e6b266c9c67"} Dec 05 08:20:05 crc kubenswrapper[4863]: I1205 08:20:05.699725 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:05 crc kubenswrapper[4863]: I1205 08:20:05.701944 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" event={"ID":"8a6e1dc6-dc91-4697-a550-5058f0a7fc98","Type":"ContainerStarted","Data":"b887341c5a3f53149c8460ff36a5286983b000b4e61340f9dcbb722f5aa90783"} Dec 05 08:20:05 crc kubenswrapper[4863]: I1205 08:20:05.703572 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-598f986844-4pl5l" event={"ID":"3d698629-4fde-46f2-a374-11418a9c99a6","Type":"ContainerStarted","Data":"f3d0a9cd31224e4fe5b0f02b971a52e5482cc20d13f9c23c459d7946d9c6fecc"} Dec 05 08:20:05 crc kubenswrapper[4863]: I1205 08:20:05.719521 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-7cdbc8777d-ww6sx" podStartSLOduration=2.719505234 podStartE2EDuration="2.719505234s" podCreationTimestamp="2025-12-05 08:20:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:20:05.716192224 +0000 UTC m=+5633.442189284" watchObservedRunningTime="2025-12-05 08:20:05.719505234 +0000 UTC m=+5633.445502274" Dec 05 08:20:06 crc kubenswrapper[4863]: I1205 08:20:06.127883 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:20:07 crc kubenswrapper[4863]: I1205 08:20:07.724962 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" 
event={"ID":"8a6e1dc6-dc91-4697-a550-5058f0a7fc98","Type":"ContainerStarted","Data":"bffc82c85fe3dc0c0035c9bb7d118f46ab923233cb509f05c9d3e220f17cc095"} Dec 05 08:20:07 crc kubenswrapper[4863]: I1205 08:20:07.725739 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:07 crc kubenswrapper[4863]: I1205 08:20:07.726806 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-598f986844-4pl5l" event={"ID":"3d698629-4fde-46f2-a374-11418a9c99a6","Type":"ContainerStarted","Data":"3f7e9bffa2c3d989f79b28a7383667b5e72e8e1ce07ec5e86e15792fd6b23d56"} Dec 05 08:20:07 crc kubenswrapper[4863]: I1205 08:20:07.726972 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:07 crc kubenswrapper[4863]: I1205 08:20:07.759320 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" podStartSLOduration=3.123025965 podStartE2EDuration="4.759299198s" podCreationTimestamp="2025-12-05 08:20:03 +0000 UTC" firstStartedPulling="2025-12-05 08:20:04.844861771 +0000 UTC m=+5632.570858811" lastFinishedPulling="2025-12-05 08:20:06.481135014 +0000 UTC m=+5634.207132044" observedRunningTime="2025-12-05 08:20:07.757884764 +0000 UTC m=+5635.483881824" watchObservedRunningTime="2025-12-05 08:20:07.759299198 +0000 UTC m=+5635.485296238" Dec 05 08:20:07 crc kubenswrapper[4863]: I1205 08:20:07.785667 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-598f986844-4pl5l" podStartSLOduration=3.124161103 podStartE2EDuration="4.785631877s" podCreationTimestamp="2025-12-05 08:20:03 +0000 UTC" firstStartedPulling="2025-12-05 08:20:04.813943121 +0000 UTC m=+5632.539940161" lastFinishedPulling="2025-12-05 08:20:06.475413895 +0000 UTC m=+5634.201410935" observedRunningTime="2025-12-05 08:20:07.782637575 +0000 UTC m=+5635.508634635" watchObservedRunningTime="2025-12-05 08:20:07.785631877 +0000 UTC m=+5635.511628917" Dec 05 08:20:08 crc kubenswrapper[4863]: I1205 08:20:08.234908 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7c8786bdb9-lfhbp" Dec 05 08:20:08 crc kubenswrapper[4863]: I1205 08:20:08.349421 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-68d5d5cd95-xjd79"] Dec 05 08:20:08 crc kubenswrapper[4863]: I1205 08:20:08.350075 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-68d5d5cd95-xjd79" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon-log" containerID="cri-o://5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da" gracePeriod=30 Dec 05 08:20:08 crc kubenswrapper[4863]: I1205 08:20:08.350517 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-68d5d5cd95-xjd79" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" containerID="cri-o://7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80" gracePeriod=30 Dec 05 08:20:11 crc kubenswrapper[4863]: I1205 08:20:11.766288 4863 generic.go:334] "Generic (PLEG): container finished" podID="1122499e-4014-4f82-a841-09f6417b7bb2" containerID="7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80" exitCode=0 Dec 05 08:20:11 crc kubenswrapper[4863]: I1205 08:20:11.766383 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68d5d5cd95-xjd79" 
event={"ID":"1122499e-4014-4f82-a841-09f6417b7bb2","Type":"ContainerDied","Data":"7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80"} Dec 05 08:20:11 crc kubenswrapper[4863]: I1205 08:20:11.884678 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-68d5d5cd95-xjd79" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.87:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.87:8080: connect: connection refused" Dec 05 08:20:14 crc kubenswrapper[4863]: I1205 08:20:14.050705 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-7cdbc8777d-ww6sx" Dec 05 08:20:15 crc kubenswrapper[4863]: I1205 08:20:15.588930 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-6f444c5f8b-2b4vc" Dec 05 08:20:15 crc kubenswrapper[4863]: I1205 08:20:15.590877 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-598f986844-4pl5l" Dec 05 08:20:21 crc kubenswrapper[4863]: I1205 08:20:21.883784 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-68d5d5cd95-xjd79" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.87:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.87:8080: connect: connection refused" Dec 05 08:20:24 crc kubenswrapper[4863]: I1205 08:20:24.050867 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-7pptc"] Dec 05 08:20:24 crc kubenswrapper[4863]: I1205 08:20:24.059684 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8a5d-account-create-update-txbg6"] Dec 05 08:20:24 crc kubenswrapper[4863]: I1205 08:20:24.072259 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8a5d-account-create-update-txbg6"] Dec 05 08:20:24 crc kubenswrapper[4863]: I1205 08:20:24.082895 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-7pptc"] Dec 05 08:20:24 crc kubenswrapper[4863]: I1205 08:20:24.619026 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93e2efad-0215-41d1-a32b-62b1c92c6576" path="/var/lib/kubelet/pods/93e2efad-0215-41d1-a32b-62b1c92c6576/volumes" Dec 05 08:20:24 crc kubenswrapper[4863]: I1205 08:20:24.620197 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7dcee0e-bc20-45a8-8f25-088c0fa00c73" path="/var/lib/kubelet/pods/b7dcee0e-bc20-45a8-8f25-088c0fa00c73/volumes" Dec 05 08:20:31 crc kubenswrapper[4863]: I1205 08:20:31.884564 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-68d5d5cd95-xjd79" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.87:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.87:8080: connect: connection refused" Dec 05 08:20:31 crc kubenswrapper[4863]: I1205 08:20:31.885276 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:20:32 crc kubenswrapper[4863]: I1205 08:20:32.031705 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-9bxkw"] Dec 05 08:20:32 crc kubenswrapper[4863]: I1205 08:20:32.042160 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-9bxkw"] Dec 05 08:20:32 crc 
kubenswrapper[4863]: I1205 08:20:32.613732 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1898d5da-0166-4d68-9fb9-95f980359e8c" path="/var/lib/kubelet/pods/1898d5da-0166-4d68-9fb9-95f980359e8c/volumes" Dec 05 08:20:34 crc kubenswrapper[4863]: I1205 08:20:34.758986 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv"] Dec 05 08:20:34 crc kubenswrapper[4863]: I1205 08:20:34.761893 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:34 crc kubenswrapper[4863]: I1205 08:20:34.764048 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 08:20:34 crc kubenswrapper[4863]: I1205 08:20:34.771898 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv"] Dec 05 08:20:34 crc kubenswrapper[4863]: I1205 08:20:34.942219 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:34 crc kubenswrapper[4863]: I1205 08:20:34.942272 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h79fp\" (UniqueName: \"kubernetes.io/projected/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-kube-api-access-h79fp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:34 crc kubenswrapper[4863]: I1205 08:20:34.942532 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.044621 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.044674 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h79fp\" (UniqueName: \"kubernetes.io/projected/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-kube-api-access-h79fp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.044803 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.045337 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.045627 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.069198 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h79fp\" (UniqueName: \"kubernetes.io/projected/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-kube-api-access-h79fp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.097749 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.237919 4863 scope.go:117] "RemoveContainer" containerID="e87f1f3405cea4e3a10a44508f43a2fe986422e5e7167ae8a1982d61482c442a" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.275380 4863 scope.go:117] "RemoveContainer" containerID="985ef58a47aaeceb3d311c0b79701bc46f1acfba38ec5641683866ad8c75ccc8" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.348228 4863 scope.go:117] "RemoveContainer" containerID="c09ab9f39395160fd517680b736ec63532ecc5ba5e1f95cb5a8e29d6a1ccab52" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.398193 4863 scope.go:117] "RemoveContainer" containerID="d2dff5629035fa87c068ff59d577620390692ccf0bda4e8c4d8af4e0ac3ac45e" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.435263 4863 scope.go:117] "RemoveContainer" containerID="3a0aa842d1e339d9f4b779f7b18161be5424c694dd03288204f0ff8c8c2127c0" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.454004 4863 scope.go:117] "RemoveContainer" containerID="5cba772715f61d0b35afba5d43b629d7cccf67a31c41602ecbcb3c2ba99f6e9d" Dec 05 08:20:35 crc kubenswrapper[4863]: I1205 08:20:35.595915 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv"] Dec 05 08:20:36 crc kubenswrapper[4863]: I1205 08:20:36.009078 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" event={"ID":"9c2abff9-3399-41b3-ba3b-a65c8ec5d371","Type":"ContainerStarted","Data":"b2fe1f8df8cad53701e81c666037aa8911dab62c765ad07463a5e1c9c21a6e70"} Dec 05 08:20:36 crc kubenswrapper[4863]: I1205 08:20:36.009566 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" event={"ID":"9c2abff9-3399-41b3-ba3b-a65c8ec5d371","Type":"ContainerStarted","Data":"8b93c703152cb8e680f03c3683516e2495da81625515d318559e9daf7bf7d28a"} Dec 05 08:20:37 crc kubenswrapper[4863]: I1205 08:20:37.021249 4863 generic.go:334] "Generic (PLEG): container finished" podID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerID="b2fe1f8df8cad53701e81c666037aa8911dab62c765ad07463a5e1c9c21a6e70" exitCode=0 Dec 05 08:20:37 crc kubenswrapper[4863]: I1205 08:20:37.021337 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" event={"ID":"9c2abff9-3399-41b3-ba3b-a65c8ec5d371","Type":"ContainerDied","Data":"b2fe1f8df8cad53701e81c666037aa8911dab62c765ad07463a5e1c9c21a6e70"} Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.463869 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.464310 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.833524 
4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.926212 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1122499e-4014-4f82-a841-09f6417b7bb2-logs\") pod \"1122499e-4014-4f82-a841-09f6417b7bb2\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.926275 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-config-data\") pod \"1122499e-4014-4f82-a841-09f6417b7bb2\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.926315 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bk25h\" (UniqueName: \"kubernetes.io/projected/1122499e-4014-4f82-a841-09f6417b7bb2-kube-api-access-bk25h\") pod \"1122499e-4014-4f82-a841-09f6417b7bb2\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.926339 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1122499e-4014-4f82-a841-09f6417b7bb2-horizon-secret-key\") pod \"1122499e-4014-4f82-a841-09f6417b7bb2\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.926508 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-scripts\") pod \"1122499e-4014-4f82-a841-09f6417b7bb2\" (UID: \"1122499e-4014-4f82-a841-09f6417b7bb2\") " Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.926691 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1122499e-4014-4f82-a841-09f6417b7bb2-logs" (OuterVolumeSpecName: "logs") pod "1122499e-4014-4f82-a841-09f6417b7bb2" (UID: "1122499e-4014-4f82-a841-09f6417b7bb2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.927184 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1122499e-4014-4f82-a841-09f6417b7bb2-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.933031 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1122499e-4014-4f82-a841-09f6417b7bb2-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "1122499e-4014-4f82-a841-09f6417b7bb2" (UID: "1122499e-4014-4f82-a841-09f6417b7bb2"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.933455 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1122499e-4014-4f82-a841-09f6417b7bb2-kube-api-access-bk25h" (OuterVolumeSpecName: "kube-api-access-bk25h") pod "1122499e-4014-4f82-a841-09f6417b7bb2" (UID: "1122499e-4014-4f82-a841-09f6417b7bb2"). InnerVolumeSpecName "kube-api-access-bk25h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.954383 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-scripts" (OuterVolumeSpecName: "scripts") pod "1122499e-4014-4f82-a841-09f6417b7bb2" (UID: "1122499e-4014-4f82-a841-09f6417b7bb2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:38 crc kubenswrapper[4863]: I1205 08:20:38.956233 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-config-data" (OuterVolumeSpecName: "config-data") pod "1122499e-4014-4f82-a841-09f6417b7bb2" (UID: "1122499e-4014-4f82-a841-09f6417b7bb2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.029144 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.029180 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bk25h\" (UniqueName: \"kubernetes.io/projected/1122499e-4014-4f82-a841-09f6417b7bb2-kube-api-access-bk25h\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.029192 4863 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1122499e-4014-4f82-a841-09f6417b7bb2-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.029202 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1122499e-4014-4f82-a841-09f6417b7bb2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.047509 4863 generic.go:334] "Generic (PLEG): container finished" podID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerID="7622469f3611c61a4a920e7235a115a922b1fcdd11071c5a67d0a5471b550d42" exitCode=0 Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.047581 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" event={"ID":"9c2abff9-3399-41b3-ba3b-a65c8ec5d371","Type":"ContainerDied","Data":"7622469f3611c61a4a920e7235a115a922b1fcdd11071c5a67d0a5471b550d42"} Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.051194 4863 generic.go:334] "Generic (PLEG): container finished" podID="1122499e-4014-4f82-a841-09f6417b7bb2" containerID="5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da" exitCode=137 Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.051232 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68d5d5cd95-xjd79" event={"ID":"1122499e-4014-4f82-a841-09f6417b7bb2","Type":"ContainerDied","Data":"5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da"} Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.051248 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-68d5d5cd95-xjd79" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.051257 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68d5d5cd95-xjd79" event={"ID":"1122499e-4014-4f82-a841-09f6417b7bb2","Type":"ContainerDied","Data":"21401449d7b649d6d6e0ad00d349e129efe93493c1a55197a72c9aad0f4c8a3c"} Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.051276 4863 scope.go:117] "RemoveContainer" containerID="7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.090290 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-68d5d5cd95-xjd79"] Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.098201 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-68d5d5cd95-xjd79"] Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.213228 4863 scope.go:117] "RemoveContainer" containerID="5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.233158 4863 scope.go:117] "RemoveContainer" containerID="7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80" Dec 05 08:20:39 crc kubenswrapper[4863]: E1205 08:20:39.233635 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80\": container with ID starting with 7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80 not found: ID does not exist" containerID="7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.233756 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80"} err="failed to get container status \"7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80\": rpc error: code = NotFound desc = could not find container \"7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80\": container with ID starting with 7c75886dc2bd103d694e6f0e30a8b27198d55bf5129873342d6191724fe6ce80 not found: ID does not exist" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.233858 4863 scope.go:117] "RemoveContainer" containerID="5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da" Dec 05 08:20:39 crc kubenswrapper[4863]: E1205 08:20:39.234136 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da\": container with ID starting with 5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da not found: ID does not exist" containerID="5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da" Dec 05 08:20:39 crc kubenswrapper[4863]: I1205 08:20:39.234235 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da"} err="failed to get container status \"5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da\": rpc error: code = NotFound desc = could not find container \"5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da\": container with ID starting with 5547d51e4e54bfc92d03ca1a868d49007696a0e486a591c874d4f51b7d9235da not found: ID does not exist" Dec 05 08:20:40 crc 
kubenswrapper[4863]: I1205 08:20:40.063061 4863 generic.go:334] "Generic (PLEG): container finished" podID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerID="2db3d492deb0eee1157f564b72ac23f0aa6a8fdf7ed1ab5b8b29d35e11b1debe" exitCode=0 Dec 05 08:20:40 crc kubenswrapper[4863]: I1205 08:20:40.063585 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" event={"ID":"9c2abff9-3399-41b3-ba3b-a65c8ec5d371","Type":"ContainerDied","Data":"2db3d492deb0eee1157f564b72ac23f0aa6a8fdf7ed1ab5b8b29d35e11b1debe"} Dec 05 08:20:40 crc kubenswrapper[4863]: I1205 08:20:40.617518 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" path="/var/lib/kubelet/pods/1122499e-4014-4f82-a841-09f6417b7bb2/volumes" Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.466161 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.578013 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-util\") pod \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.578368 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-bundle\") pod \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.578600 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h79fp\" (UniqueName: \"kubernetes.io/projected/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-kube-api-access-h79fp\") pod \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\" (UID: \"9c2abff9-3399-41b3-ba3b-a65c8ec5d371\") " Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.579852 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-bundle" (OuterVolumeSpecName: "bundle") pod "9c2abff9-3399-41b3-ba3b-a65c8ec5d371" (UID: "9c2abff9-3399-41b3-ba3b-a65c8ec5d371"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.582668 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-util" (OuterVolumeSpecName: "util") pod "9c2abff9-3399-41b3-ba3b-a65c8ec5d371" (UID: "9c2abff9-3399-41b3-ba3b-a65c8ec5d371"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.583605 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-kube-api-access-h79fp" (OuterVolumeSpecName: "kube-api-access-h79fp") pod "9c2abff9-3399-41b3-ba3b-a65c8ec5d371" (UID: "9c2abff9-3399-41b3-ba3b-a65c8ec5d371"). InnerVolumeSpecName "kube-api-access-h79fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.680995 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h79fp\" (UniqueName: \"kubernetes.io/projected/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-kube-api-access-h79fp\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.681024 4863 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-util\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:41 crc kubenswrapper[4863]: I1205 08:20:41.681040 4863 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9c2abff9-3399-41b3-ba3b-a65c8ec5d371-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:42 crc kubenswrapper[4863]: I1205 08:20:42.095463 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" event={"ID":"9c2abff9-3399-41b3-ba3b-a65c8ec5d371","Type":"ContainerDied","Data":"8b93c703152cb8e680f03c3683516e2495da81625515d318559e9daf7bf7d28a"} Dec 05 08:20:42 crc kubenswrapper[4863]: I1205 08:20:42.095534 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b93c703152cb8e680f03c3683516e2495da81625515d318559e9daf7bf7d28a" Dec 05 08:20:42 crc kubenswrapper[4863]: I1205 08:20:42.095598 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.444111 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r"] Dec 05 08:20:52 crc kubenswrapper[4863]: E1205 08:20:52.460068 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerName="util" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.460363 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerName="util" Dec 05 08:20:52 crc kubenswrapper[4863]: E1205 08:20:52.460463 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerName="pull" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.460561 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerName="pull" Dec 05 08:20:52 crc kubenswrapper[4863]: E1205 08:20:52.460652 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.460736 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" Dec 05 08:20:52 crc kubenswrapper[4863]: E1205 08:20:52.460838 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon-log" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.460912 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon-log" Dec 05 08:20:52 crc kubenswrapper[4863]: E1205 08:20:52.460973 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerName="extract" 
Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.461027 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerName="extract" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.461254 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon-log" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.461324 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c2abff9-3399-41b3-ba3b-a65c8ec5d371" containerName="extract" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.461396 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1122499e-4014-4f82-a841-09f6417b7bb2" containerName="horizon" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.462102 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.462266 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.465815 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.466023 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-q7xh6" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.466028 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.497212 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqhvb\" (UniqueName: \"kubernetes.io/projected/d7194909-83dd-41b1-af13-9a8b6212f1b3-kube-api-access-tqhvb\") pod \"obo-prometheus-operator-668cf9dfbb-b8q8r\" (UID: \"d7194909-83dd-41b1-af13-9a8b6212f1b3\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.501579 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.503177 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.511188 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-9h8wg" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.513329 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.540509 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.550715 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.607093 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4a6c106e-c0dd-41c6-b598-fd8a1362b9ea-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j\" (UID: \"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.607174 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4a6c106e-c0dd-41c6-b598-fd8a1362b9ea-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j\" (UID: \"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.607281 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t\" (UID: \"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.607351 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqhvb\" (UniqueName: \"kubernetes.io/projected/d7194909-83dd-41b1-af13-9a8b6212f1b3-kube-api-access-tqhvb\") pod \"obo-prometheus-operator-668cf9dfbb-b8q8r\" (UID: \"d7194909-83dd-41b1-af13-9a8b6212f1b3\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.607643 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t\" (UID: \"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.634771 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqhvb\" (UniqueName: \"kubernetes.io/projected/d7194909-83dd-41b1-af13-9a8b6212f1b3-kube-api-access-tqhvb\") pod \"obo-prometheus-operator-668cf9dfbb-b8q8r\" (UID: \"d7194909-83dd-41b1-af13-9a8b6212f1b3\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.637633 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.651852 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.689538 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-jn96f"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.691295 4863 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.699528 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.699732 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-lrxpn" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.703302 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-jn96f"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.708981 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t\" (UID: \"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.709215 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t\" (UID: \"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.709260 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4a6c106e-c0dd-41c6-b598-fd8a1362b9ea-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j\" (UID: \"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.709311 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4a6c106e-c0dd-41c6-b598-fd8a1362b9ea-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j\" (UID: \"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.716004 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4a6c106e-c0dd-41c6-b598-fd8a1362b9ea-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j\" (UID: \"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.725088 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t\" (UID: \"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.725349 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/4a6c106e-c0dd-41c6-b598-fd8a1362b9ea-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j\" (UID: \"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.727331 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t\" (UID: \"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.784695 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-lpvbx"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.786023 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.787719 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-zqngc" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.792966 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.801282 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-lpvbx"] Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.814617 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk87r\" (UniqueName: \"kubernetes.io/projected/d6f48c43-3adb-4b52-95ab-dbc35a392423-kube-api-access-gk87r\") pod \"observability-operator-d8bb48f5d-jn96f\" (UID: \"d6f48c43-3adb-4b52-95ab-dbc35a392423\") " pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.814883 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d6f48c43-3adb-4b52-95ab-dbc35a392423-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-jn96f\" (UID: \"d6f48c43-3adb-4b52-95ab-dbc35a392423\") " pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.853756 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.894276 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.916421 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk87r\" (UniqueName: \"kubernetes.io/projected/d6f48c43-3adb-4b52-95ab-dbc35a392423-kube-api-access-gk87r\") pod \"observability-operator-d8bb48f5d-jn96f\" (UID: \"d6f48c43-3adb-4b52-95ab-dbc35a392423\") " pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.916550 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/84158b8a-71ed-4dc1-8789-83583fa243d4-openshift-service-ca\") pod \"perses-operator-5446b9c989-lpvbx\" (UID: \"84158b8a-71ed-4dc1-8789-83583fa243d4\") " pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.916588 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d6f48c43-3adb-4b52-95ab-dbc35a392423-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-jn96f\" (UID: \"d6f48c43-3adb-4b52-95ab-dbc35a392423\") " pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.916663 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4slkp\" (UniqueName: \"kubernetes.io/projected/84158b8a-71ed-4dc1-8789-83583fa243d4-kube-api-access-4slkp\") pod \"perses-operator-5446b9c989-lpvbx\" (UID: \"84158b8a-71ed-4dc1-8789-83583fa243d4\") " pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.922723 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d6f48c43-3adb-4b52-95ab-dbc35a392423-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-jn96f\" (UID: \"d6f48c43-3adb-4b52-95ab-dbc35a392423\") " pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:52 crc kubenswrapper[4863]: I1205 08:20:52.939674 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk87r\" (UniqueName: \"kubernetes.io/projected/d6f48c43-3adb-4b52-95ab-dbc35a392423-kube-api-access-gk87r\") pod \"observability-operator-d8bb48f5d-jn96f\" (UID: \"d6f48c43-3adb-4b52-95ab-dbc35a392423\") " pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.021010 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/84158b8a-71ed-4dc1-8789-83583fa243d4-openshift-service-ca\") pod \"perses-operator-5446b9c989-lpvbx\" (UID: \"84158b8a-71ed-4dc1-8789-83583fa243d4\") " pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.021228 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4slkp\" (UniqueName: \"kubernetes.io/projected/84158b8a-71ed-4dc1-8789-83583fa243d4-kube-api-access-4slkp\") pod \"perses-operator-5446b9c989-lpvbx\" (UID: \"84158b8a-71ed-4dc1-8789-83583fa243d4\") " 
pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.022413 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/84158b8a-71ed-4dc1-8789-83583fa243d4-openshift-service-ca\") pod \"perses-operator-5446b9c989-lpvbx\" (UID: \"84158b8a-71ed-4dc1-8789-83583fa243d4\") " pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.036640 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4slkp\" (UniqueName: \"kubernetes.io/projected/84158b8a-71ed-4dc1-8789-83583fa243d4-kube-api-access-4slkp\") pod \"perses-operator-5446b9c989-lpvbx\" (UID: \"84158b8a-71ed-4dc1-8789-83583fa243d4\") " pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.232583 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.265004 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.373174 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r"] Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.579633 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t"] Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.726901 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j"] Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.832777 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-jn96f"] Dec 05 08:20:53 crc kubenswrapper[4863]: W1205 08:20:53.837239 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6f48c43_3adb_4b52_95ab_dbc35a392423.slice/crio-4c194a71587f51a6368e29b52e576343d9b0f2f9de3321ca7d334c7bd8121532 WatchSource:0}: Error finding container 4c194a71587f51a6368e29b52e576343d9b0f2f9de3321ca7d334c7bd8121532: Status 404 returned error can't find the container with id 4c194a71587f51a6368e29b52e576343d9b0f2f9de3321ca7d334c7bd8121532 Dec 05 08:20:53 crc kubenswrapper[4863]: I1205 08:20:53.940580 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-lpvbx"] Dec 05 08:20:53 crc kubenswrapper[4863]: W1205 08:20:53.943302 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84158b8a_71ed_4dc1_8789_83583fa243d4.slice/crio-b6bb784517932c605f512b068e9683d5dcf9d90ca832c2dd10535eb313b9bd03 WatchSource:0}: Error finding container b6bb784517932c605f512b068e9683d5dcf9d90ca832c2dd10535eb313b9bd03: Status 404 returned error can't find the container with id b6bb784517932c605f512b068e9683d5dcf9d90ca832c2dd10535eb313b9bd03 Dec 05 08:20:54 crc kubenswrapper[4863]: I1205 08:20:54.239406 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-lpvbx" 
event={"ID":"84158b8a-71ed-4dc1-8789-83583fa243d4","Type":"ContainerStarted","Data":"b6bb784517932c605f512b068e9683d5dcf9d90ca832c2dd10535eb313b9bd03"} Dec 05 08:20:54 crc kubenswrapper[4863]: I1205 08:20:54.240616 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" event={"ID":"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea","Type":"ContainerStarted","Data":"fddf5ab8a9414499dccb5af367b62b392c373974362cf63f281c9ff63c097e91"} Dec 05 08:20:54 crc kubenswrapper[4863]: I1205 08:20:54.242324 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" event={"ID":"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a","Type":"ContainerStarted","Data":"175bbf93597de3c717e9d5cb486cb9dc9c54610d46645d5d474183fc1f4bc3d3"} Dec 05 08:20:54 crc kubenswrapper[4863]: I1205 08:20:54.243734 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" event={"ID":"d6f48c43-3adb-4b52-95ab-dbc35a392423","Type":"ContainerStarted","Data":"4c194a71587f51a6368e29b52e576343d9b0f2f9de3321ca7d334c7bd8121532"} Dec 05 08:20:54 crc kubenswrapper[4863]: I1205 08:20:54.245112 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" event={"ID":"d7194909-83dd-41b1-af13-9a8b6212f1b3","Type":"ContainerStarted","Data":"191772a9fcd8e95506bce8b4fca7ed8272bf61f3adbd452742b6b2e82b9fdd0c"} Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.344339 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" event={"ID":"4a6c106e-c0dd-41c6-b598-fd8a1362b9ea","Type":"ContainerStarted","Data":"5996290dca4cd8f7221b2593e50544a892cb9f5fe61fc3c82d7621be02ca0242"} Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.345954 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" event={"ID":"f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a","Type":"ContainerStarted","Data":"2547c0ad71f7cec57b5d088057fd04584268df983c3a09aad58d348a725c067b"} Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.347788 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" event={"ID":"d6f48c43-3adb-4b52-95ab-dbc35a392423","Type":"ContainerStarted","Data":"22c40d17453f30318a94e1479b8e4e29e789e9cbfac00916ac01391963614a1f"} Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.347863 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.349136 4863 patch_prober.go:28] interesting pod/observability-operator-d8bb48f5d-jn96f container/operator namespace/openshift-operators: Readiness probe status=failure output="Get \"http://10.217.1.103:8081/healthz\": dial tcp 10.217.1.103:8081: connect: connection refused" start-of-body= Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.349177 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" podUID="d6f48c43-3adb-4b52-95ab-dbc35a392423" containerName="operator" probeResult="failure" output="Get \"http://10.217.1.103:8081/healthz\": dial tcp 10.217.1.103:8081: connect: connection refused" Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 
08:21:03.350489 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-lpvbx" event={"ID":"84158b8a-71ed-4dc1-8789-83583fa243d4","Type":"ContainerStarted","Data":"2d041dc25d764d671fc786d3590672df78a14780b33ea17577b6b1759833e02c"} Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.350644 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.368524 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j" podStartSLOduration=2.345325395 podStartE2EDuration="11.368504556s" podCreationTimestamp="2025-12-05 08:20:52 +0000 UTC" firstStartedPulling="2025-12-05 08:20:53.742818324 +0000 UTC m=+5681.468815364" lastFinishedPulling="2025-12-05 08:21:02.765997485 +0000 UTC m=+5690.491994525" observedRunningTime="2025-12-05 08:21:03.36213161 +0000 UTC m=+5691.088128660" watchObservedRunningTime="2025-12-05 08:21:03.368504556 +0000 UTC m=+5691.094501596" Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.415161 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t" podStartSLOduration=2.237891258 podStartE2EDuration="11.415133887s" podCreationTimestamp="2025-12-05 08:20:52 +0000 UTC" firstStartedPulling="2025-12-05 08:20:53.588714845 +0000 UTC m=+5681.314711885" lastFinishedPulling="2025-12-05 08:21:02.765957474 +0000 UTC m=+5690.491954514" observedRunningTime="2025-12-05 08:21:03.410459754 +0000 UTC m=+5691.136456794" watchObservedRunningTime="2025-12-05 08:21:03.415133887 +0000 UTC m=+5691.141130927" Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.481824 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" podStartSLOduration=2.453232512 podStartE2EDuration="11.481798565s" podCreationTimestamp="2025-12-05 08:20:52 +0000 UTC" firstStartedPulling="2025-12-05 08:20:53.841464327 +0000 UTC m=+5681.567461367" lastFinishedPulling="2025-12-05 08:21:02.87003038 +0000 UTC m=+5690.596027420" observedRunningTime="2025-12-05 08:21:03.450811583 +0000 UTC m=+5691.176808643" watchObservedRunningTime="2025-12-05 08:21:03.481798565 +0000 UTC m=+5691.207795605" Dec 05 08:21:03 crc kubenswrapper[4863]: I1205 08:21:03.508757 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-lpvbx" podStartSLOduration=2.685779585 podStartE2EDuration="11.508739068s" podCreationTimestamp="2025-12-05 08:20:52 +0000 UTC" firstStartedPulling="2025-12-05 08:20:53.945433811 +0000 UTC m=+5681.671430851" lastFinishedPulling="2025-12-05 08:21:02.768393294 +0000 UTC m=+5690.494390334" observedRunningTime="2025-12-05 08:21:03.495696592 +0000 UTC m=+5691.221693632" watchObservedRunningTime="2025-12-05 08:21:03.508739068 +0000 UTC m=+5691.234736108" Dec 05 08:21:04 crc kubenswrapper[4863]: I1205 08:21:04.365704 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" event={"ID":"d7194909-83dd-41b1-af13-9a8b6212f1b3","Type":"ContainerStarted","Data":"e69cb2682f1262733a735fd82de6be13074e667e19a04b83fdb1e5b24b729304"} Dec 05 08:21:04 crc kubenswrapper[4863]: I1205 08:21:04.367266 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-jn96f" Dec 05 08:21:04 crc kubenswrapper[4863]: I1205 08:21:04.384597 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-b8q8r" podStartSLOduration=2.933171298 podStartE2EDuration="12.384571949s" podCreationTimestamp="2025-12-05 08:20:52 +0000 UTC" firstStartedPulling="2025-12-05 08:20:53.391200693 +0000 UTC m=+5681.117197733" lastFinishedPulling="2025-12-05 08:21:02.842601344 +0000 UTC m=+5690.568598384" observedRunningTime="2025-12-05 08:21:04.380733016 +0000 UTC m=+5692.106730056" watchObservedRunningTime="2025-12-05 08:21:04.384571949 +0000 UTC m=+5692.110568989" Dec 05 08:21:08 crc kubenswrapper[4863]: I1205 08:21:08.464578 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:21:08 crc kubenswrapper[4863]: I1205 08:21:08.465242 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:21:13 crc kubenswrapper[4863]: I1205 08:21:13.268682 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-lpvbx" Dec 05 08:21:14 crc kubenswrapper[4863]: I1205 08:21:14.068641 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-g948k"] Dec 05 08:21:14 crc kubenswrapper[4863]: I1205 08:21:14.083453 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-2374-account-create-update-g9tph"] Dec 05 08:21:14 crc kubenswrapper[4863]: I1205 08:21:14.094575 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-g948k"] Dec 05 08:21:14 crc kubenswrapper[4863]: I1205 08:21:14.104756 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-2374-account-create-update-g9tph"] Dec 05 08:21:14 crc kubenswrapper[4863]: I1205 08:21:14.611656 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85a2d9b6-4db2-4896-84f0-ee58f4750876" path="/var/lib/kubelet/pods/85a2d9b6-4db2-4896-84f0-ee58f4750876/volumes" Dec 05 08:21:14 crc kubenswrapper[4863]: I1205 08:21:14.612570 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e6d82a-c499-4bad-a79a-1730a74162db" path="/var/lib/kubelet/pods/e2e6d82a-c499-4bad-a79a-1730a74162db/volumes" Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.758635 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.758846 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" containerName="openstackclient" containerID="cri-o://2262a2baba028670732e6e118f56344acc0a997c7db2c3f63a75be8ecf507486" gracePeriod=2 Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.775602 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.873868 
4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 08:21:15 crc kubenswrapper[4863]: E1205 08:21:15.875110 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" containerName="openstackclient" Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.875191 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" containerName="openstackclient" Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.875652 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" containerName="openstackclient" Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.880834 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.889209 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 08:21:15 crc kubenswrapper[4863]: I1205 08:21:15.909961 4863 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" podUID="a69b1ece-1229-466e-9427-08948d0e1144" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.010354 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.014049 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.016949 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-bbchx" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.022413 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a69b1ece-1229-466e-9427-08948d0e1144-openstack-config\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.022490 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a69b1ece-1229-466e-9427-08948d0e1144-openstack-config-secret\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.022629 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnlff\" (UniqueName: \"kubernetes.io/projected/a69b1ece-1229-466e-9427-08948d0e1144-kube-api-access-qnlff\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.036834 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.124606 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf6l2\" (UniqueName: \"kubernetes.io/projected/413bfba6-563e-4617-90a0-de3e47ee0ef3-kube-api-access-hf6l2\") pod \"kube-state-metrics-0\" (UID: \"413bfba6-563e-4617-90a0-de3e47ee0ef3\") " pod="openstack/kube-state-metrics-0" Dec 05 08:21:16 
crc kubenswrapper[4863]: I1205 08:21:16.124695 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a69b1ece-1229-466e-9427-08948d0e1144-openstack-config\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.124717 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a69b1ece-1229-466e-9427-08948d0e1144-openstack-config-secret\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.124749 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnlff\" (UniqueName: \"kubernetes.io/projected/a69b1ece-1229-466e-9427-08948d0e1144-kube-api-access-qnlff\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.125659 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a69b1ece-1229-466e-9427-08948d0e1144-openstack-config\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.131908 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a69b1ece-1229-466e-9427-08948d0e1144-openstack-config-secret\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.158140 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnlff\" (UniqueName: \"kubernetes.io/projected/a69b1ece-1229-466e-9427-08948d0e1144-kube-api-access-qnlff\") pod \"openstackclient\" (UID: \"a69b1ece-1229-466e-9427-08948d0e1144\") " pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.226861 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf6l2\" (UniqueName: \"kubernetes.io/projected/413bfba6-563e-4617-90a0-de3e47ee0ef3-kube-api-access-hf6l2\") pod \"kube-state-metrics-0\" (UID: \"413bfba6-563e-4617-90a0-de3e47ee0ef3\") " pod="openstack/kube-state-metrics-0" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.248137 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.274566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf6l2\" (UniqueName: \"kubernetes.io/projected/413bfba6-563e-4617-90a0-de3e47ee0ef3-kube-api-access-hf6l2\") pod \"kube-state-metrics-0\" (UID: \"413bfba6-563e-4617-90a0-de3e47ee0ef3\") " pod="openstack/kube-state-metrics-0" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.333811 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.926238 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.930487 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.952248 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.952737 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.952881 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.955117 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-q5g62" Dec 05 08:21:16 crc kubenswrapper[4863]: I1205 08:21:16.957270 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.018044 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.054843 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.055091 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.055229 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.055420 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.055549 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc 
kubenswrapper[4863]: I1205 08:21:17.055654 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.055760 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbcjq\" (UniqueName: \"kubernetes.io/projected/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-kube-api-access-lbcjq\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.174039 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.176698 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.176920 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.177057 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbcjq\" (UniqueName: \"kubernetes.io/projected/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-kube-api-access-lbcjq\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.177182 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.177462 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.177693 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") 
" pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.180531 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.182430 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.211382 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.213747 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.215886 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.229124 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.238218 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbcjq\" (UniqueName: \"kubernetes.io/projected/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-kube-api-access-lbcjq\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.239170 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/c3190ef6-e28b-43a9-bcaa-ab22eacb8142-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"c3190ef6-e28b-43a9-bcaa-ab22eacb8142\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.310145 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.360608 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.453487 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.457028 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.467675 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.467903 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.467917 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.468033 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.468147 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.468231 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-hs597" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.475275 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.538791 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"413bfba6-563e-4617-90a0-de3e47ee0ef3","Type":"ContainerStarted","Data":"cd3dd6dbdb4125ba33b9af6aed4c4f07015e53281fabff428b9c3adbea81f5bb"} Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.548560 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a69b1ece-1229-466e-9427-08948d0e1144","Type":"ContainerStarted","Data":"c6b58c218657e027f6e931a6b94d96628a7743b868f0ca05772187f5c2fcadf8"} Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599261 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/0923802c-8b5d-46ef-b409-ad5f7959ff09-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599323 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n97cv\" (UniqueName: \"kubernetes.io/projected/0923802c-8b5d-46ef-b409-ad5f7959ff09-kube-api-access-n97cv\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599355 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/0923802c-8b5d-46ef-b409-ad5f7959ff09-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599374 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/0923802c-8b5d-46ef-b409-ad5f7959ff09-config-out\") pod \"prometheus-metric-storage-0\" (UID: 
\"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599400 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599490 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599519 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-config\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.599544 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.705715 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/0923802c-8b5d-46ef-b409-ad5f7959ff09-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.705989 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n97cv\" (UniqueName: \"kubernetes.io/projected/0923802c-8b5d-46ef-b409-ad5f7959ff09-kube-api-access-n97cv\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.706031 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/0923802c-8b5d-46ef-b409-ad5f7959ff09-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.706048 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/0923802c-8b5d-46ef-b409-ad5f7959ff09-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.706072 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.706138 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.706166 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-config\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.706186 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.706530 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/0923802c-8b5d-46ef-b409-ad5f7959ff09-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.711131 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/0923802c-8b5d-46ef-b409-ad5f7959ff09-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.717888 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.721566 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/0923802c-8b5d-46ef-b409-ad5f7959ff09-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.724874 4863 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.724941 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f5cad9ffe100631cce58dbd9faee68eda9894581ca49d85a2a9fa48e9646b5f8/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.728243 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.752344 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n97cv\" (UniqueName: \"kubernetes.io/projected/0923802c-8b5d-46ef-b409-ad5f7959ff09-kube-api-access-n97cv\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.752779 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0923802c-8b5d-46ef-b409-ad5f7959ff09-config\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:17 crc kubenswrapper[4863]: I1205 08:21:17.865564 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-08ff834c-97ae-44ca-9f01-5c3a02fa5940\") pod \"prometheus-metric-storage-0\" (UID: \"0923802c-8b5d-46ef-b409-ad5f7959ff09\") " pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.086648 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.109535 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.559502 4863 generic.go:334] "Generic (PLEG): container finished" podID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" containerID="2262a2baba028670732e6e118f56344acc0a997c7db2c3f63a75be8ecf507486" exitCode=137 Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.559714 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac376e11f0bfed87e8b20f3a6a07790f5d700e3f9cccda75a2ec767f069fbecc" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.560609 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a69b1ece-1229-466e-9427-08948d0e1144","Type":"ContainerStarted","Data":"714ccbc79dc08b99c5c75aa0cd0d15fd21035e80caf8d73d72a0a7c625f534b4"} Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.565264 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"413bfba6-563e-4617-90a0-de3e47ee0ef3","Type":"ContainerStarted","Data":"e40f4e95913d3f3959808d886a334c48909e0b520ced37fd0669580b43ec3aa4"} Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.566104 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.567436 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c3190ef6-e28b-43a9-bcaa-ab22eacb8142","Type":"ContainerStarted","Data":"cb0c719ed3f1610ae5482842c92847d34a05a3a67d3fbf6527755f600a5c99ad"} Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.583953 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.583938397 podStartE2EDuration="3.583938397s" podCreationTimestamp="2025-12-05 08:21:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:21:18.579495109 +0000 UTC m=+5706.305492169" watchObservedRunningTime="2025-12-05 08:21:18.583938397 +0000 UTC m=+5706.309935437" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.629555 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.082297545 podStartE2EDuration="3.629537273s" podCreationTimestamp="2025-12-05 08:21:15 +0000 UTC" firstStartedPulling="2025-12-05 08:21:17.216125838 +0000 UTC m=+5704.942122878" lastFinishedPulling="2025-12-05 08:21:17.763365576 +0000 UTC m=+5705.489362606" observedRunningTime="2025-12-05 08:21:18.603675036 +0000 UTC m=+5706.329672076" watchObservedRunningTime="2025-12-05 08:21:18.629537273 +0000 UTC m=+5706.355534313" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.646388 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.688896 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.729534 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config-secret\") pod \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.729631 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk6b4\" (UniqueName: \"kubernetes.io/projected/a134c4bd-77c4-40ca-9c30-565c1edf00ab-kube-api-access-lk6b4\") pod \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.729684 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config\") pod \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\" (UID: \"a134c4bd-77c4-40ca-9c30-565c1edf00ab\") " Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.736212 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a134c4bd-77c4-40ca-9c30-565c1edf00ab-kube-api-access-lk6b4" (OuterVolumeSpecName: "kube-api-access-lk6b4") pod "a134c4bd-77c4-40ca-9c30-565c1edf00ab" (UID: "a134c4bd-77c4-40ca-9c30-565c1edf00ab"). InnerVolumeSpecName "kube-api-access-lk6b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.756966 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a134c4bd-77c4-40ca-9c30-565c1edf00ab" (UID: "a134c4bd-77c4-40ca-9c30-565c1edf00ab"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.782185 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a134c4bd-77c4-40ca-9c30-565c1edf00ab" (UID: "a134c4bd-77c4-40ca-9c30-565c1edf00ab"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.832093 4863 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.832132 4863 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a134c4bd-77c4-40ca-9c30-565c1edf00ab-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 08:21:18 crc kubenswrapper[4863]: I1205 08:21:18.832147 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk6b4\" (UniqueName: \"kubernetes.io/projected/a134c4bd-77c4-40ca-9c30-565c1edf00ab-kube-api-access-lk6b4\") on node \"crc\" DevicePath \"\"" Dec 05 08:21:19 crc kubenswrapper[4863]: I1205 08:21:19.586550 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"0923802c-8b5d-46ef-b409-ad5f7959ff09","Type":"ContainerStarted","Data":"d2057fc8804206c65af4404653b9ccbdafb16ac58ff593243633b2c5b51ede43"} Dec 05 08:21:19 crc kubenswrapper[4863]: I1205 08:21:19.586580 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 08:21:19 crc kubenswrapper[4863]: I1205 08:21:19.613917 4863 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" podUID="a69b1ece-1229-466e-9427-08948d0e1144" Dec 05 08:21:20 crc kubenswrapper[4863]: I1205 08:21:20.618829 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a134c4bd-77c4-40ca-9c30-565c1edf00ab" path="/var/lib/kubelet/pods/a134c4bd-77c4-40ca-9c30-565c1edf00ab/volumes" Dec 05 08:21:23 crc kubenswrapper[4863]: I1205 08:21:23.650264 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c3190ef6-e28b-43a9-bcaa-ab22eacb8142","Type":"ContainerStarted","Data":"4fa839ece5e735f68b29d3c711398288c703e04ae0b7664423fea203926495af"} Dec 05 08:21:23 crc kubenswrapper[4863]: I1205 08:21:23.652993 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"0923802c-8b5d-46ef-b409-ad5f7959ff09","Type":"ContainerStarted","Data":"760d337ee6a2eeb300d27c30459e8326c17b9d28c5440154a65d8190f70b6857"} Dec 05 08:21:26 crc kubenswrapper[4863]: I1205 08:21:26.341884 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 08:21:29 crc kubenswrapper[4863]: I1205 08:21:29.707580 4863 generic.go:334] "Generic (PLEG): container finished" podID="c3190ef6-e28b-43a9-bcaa-ab22eacb8142" containerID="4fa839ece5e735f68b29d3c711398288c703e04ae0b7664423fea203926495af" exitCode=0 Dec 05 08:21:29 crc kubenswrapper[4863]: I1205 08:21:29.707664 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c3190ef6-e28b-43a9-bcaa-ab22eacb8142","Type":"ContainerDied","Data":"4fa839ece5e735f68b29d3c711398288c703e04ae0b7664423fea203926495af"} Dec 05 08:21:30 crc kubenswrapper[4863]: I1205 08:21:30.723622 4863 generic.go:334] "Generic (PLEG): container finished" podID="0923802c-8b5d-46ef-b409-ad5f7959ff09" containerID="760d337ee6a2eeb300d27c30459e8326c17b9d28c5440154a65d8190f70b6857" exitCode=0 Dec 05 08:21:30 
crc kubenswrapper[4863]: I1205 08:21:30.723688 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"0923802c-8b5d-46ef-b409-ad5f7959ff09","Type":"ContainerDied","Data":"760d337ee6a2eeb300d27c30459e8326c17b9d28c5440154a65d8190f70b6857"} Dec 05 08:21:32 crc kubenswrapper[4863]: I1205 08:21:32.752107 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c3190ef6-e28b-43a9-bcaa-ab22eacb8142","Type":"ContainerStarted","Data":"3579fce026cf64e54ae57c39e584e34d636b94da17562a6e9f4b59e1d05d054f"} Dec 05 08:21:35 crc kubenswrapper[4863]: I1205 08:21:35.591778 4863 scope.go:117] "RemoveContainer" containerID="cdc5fea231d501aafe34503c4b547233475d9c3e68bd538372b432aa0453aa23" Dec 05 08:21:36 crc kubenswrapper[4863]: I1205 08:21:36.799728 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c3190ef6-e28b-43a9-bcaa-ab22eacb8142","Type":"ContainerStarted","Data":"23c36f5a4b37ce02ee6ffcd181d0c204beadd5a21f122a2ed51eae90438848c1"} Dec 05 08:21:36 crc kubenswrapper[4863]: I1205 08:21:36.800076 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:36 crc kubenswrapper[4863]: I1205 08:21:36.805901 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 05 08:21:36 crc kubenswrapper[4863]: I1205 08:21:36.834255 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=7.213872688 podStartE2EDuration="20.834238217s" podCreationTimestamp="2025-12-05 08:21:16 +0000 UTC" firstStartedPulling="2025-12-05 08:21:18.112798634 +0000 UTC m=+5705.838795674" lastFinishedPulling="2025-12-05 08:21:31.733164163 +0000 UTC m=+5719.459161203" observedRunningTime="2025-12-05 08:21:36.82982661 +0000 UTC m=+5724.555823650" watchObservedRunningTime="2025-12-05 08:21:36.834238217 +0000 UTC m=+5724.560235277" Dec 05 08:21:37 crc kubenswrapper[4863]: I1205 08:21:37.125113 4863 scope.go:117] "RemoveContainer" containerID="9d95fce929aa765c1942593067ba192e63e4400d6a05a590c53a5fe4321310d4" Dec 05 08:21:37 crc kubenswrapper[4863]: I1205 08:21:37.196489 4863 scope.go:117] "RemoveContainer" containerID="2262a2baba028670732e6e118f56344acc0a997c7db2c3f63a75be8ecf507486" Dec 05 08:21:37 crc kubenswrapper[4863]: I1205 08:21:37.819525 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"0923802c-8b5d-46ef-b409-ad5f7959ff09","Type":"ContainerStarted","Data":"c1cc7074ba2b7072e570f641c51d295dc5a1bf7b539444bb4c279b9feb6b3fba"} Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.464630 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.465063 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:21:38 crc kubenswrapper[4863]: 
I1205 08:21:38.465130 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.466270 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.466377 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" gracePeriod=600 Dec 05 08:21:38 crc kubenswrapper[4863]: E1205 08:21:38.586904 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.833012 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" exitCode=0 Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.833716 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c"} Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.833789 4863 scope.go:117] "RemoveContainer" containerID="eee869fb7e6f374e18166f19582d12b2fb32088b174d35edb03957c7b461d2cb" Dec 05 08:21:38 crc kubenswrapper[4863]: I1205 08:21:38.834880 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:21:38 crc kubenswrapper[4863]: E1205 08:21:38.835600 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:21:40 crc kubenswrapper[4863]: I1205 08:21:40.036791 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-vbfft"] Dec 05 08:21:40 crc kubenswrapper[4863]: I1205 08:21:40.047383 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-vbfft"] Dec 05 08:21:40 crc kubenswrapper[4863]: I1205 08:21:40.620889 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="862ddeea-c33e-4ac2-99b0-476bb6451fbe" path="/var/lib/kubelet/pods/862ddeea-c33e-4ac2-99b0-476bb6451fbe/volumes" Dec 05 08:21:40 crc kubenswrapper[4863]: I1205 08:21:40.865806 
4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"0923802c-8b5d-46ef-b409-ad5f7959ff09","Type":"ContainerStarted","Data":"c29a523e46c92ecbe7f750325f661b3f9c3a0b1c37d1dc97ca7ae715a526c134"} Dec 05 08:21:44 crc kubenswrapper[4863]: I1205 08:21:44.915105 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"0923802c-8b5d-46ef-b409-ad5f7959ff09","Type":"ContainerStarted","Data":"bbed7c174d94c7e738f50882eabfbd7a92e63e307c15e75e43af9b0a4f411aed"} Dec 05 08:21:44 crc kubenswrapper[4863]: I1205 08:21:44.950866 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=2.993798271 podStartE2EDuration="28.950845969s" podCreationTimestamp="2025-12-05 08:21:16 +0000 UTC" firstStartedPulling="2025-12-05 08:21:18.687751436 +0000 UTC m=+5706.413748476" lastFinishedPulling="2025-12-05 08:21:44.644799114 +0000 UTC m=+5732.370796174" observedRunningTime="2025-12-05 08:21:44.935181619 +0000 UTC m=+5732.661178669" watchObservedRunningTime="2025-12-05 08:21:44.950845969 +0000 UTC m=+5732.676843009" Dec 05 08:21:48 crc kubenswrapper[4863]: I1205 08:21:48.109851 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:48 crc kubenswrapper[4863]: I1205 08:21:48.110434 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:48 crc kubenswrapper[4863]: I1205 08:21:48.115551 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:48 crc kubenswrapper[4863]: I1205 08:21:48.956836 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.527455 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.529724 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.532671 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.532729 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.545112 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.667273 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77szz\" (UniqueName: \"kubernetes.io/projected/faefd764-d947-483f-9848-81f5ea81afa1-kube-api-access-77szz\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.667326 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-run-httpd\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.667387 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.667487 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.667545 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-scripts\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.667584 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-log-httpd\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.667664 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-config-data\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.770601 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-scripts\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.770682 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-log-httpd\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.770782 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-config-data\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.770882 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77szz\" (UniqueName: \"kubernetes.io/projected/faefd764-d947-483f-9848-81f5ea81afa1-kube-api-access-77szz\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.770908 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-run-httpd\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.770937 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.771044 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.771395 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-log-httpd\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.771999 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-run-httpd\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.786227 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.786437 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-scripts\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.788817 4863 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-77szz\" (UniqueName: \"kubernetes.io/projected/faefd764-d947-483f-9848-81f5ea81afa1-kube-api-access-77szz\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.789247 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-config-data\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.795736 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " pod="openstack/ceilometer-0" Dec 05 08:21:49 crc kubenswrapper[4863]: I1205 08:21:49.855589 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:21:50 crc kubenswrapper[4863]: I1205 08:21:50.423336 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:21:50 crc kubenswrapper[4863]: I1205 08:21:50.602202 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:21:50 crc kubenswrapper[4863]: E1205 08:21:50.602905 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:21:50 crc kubenswrapper[4863]: I1205 08:21:50.982612 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerStarted","Data":"0427f23e156aab4c90be99d6126df9ec787866056b1f5d7a8eda135c8a75b037"} Dec 05 08:21:55 crc kubenswrapper[4863]: I1205 08:21:55.043826 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerStarted","Data":"ace934af95b06d9e96209d9df7ba7578f5e48aa661e6550b21ace90356a4c9ca"} Dec 05 08:21:56 crc kubenswrapper[4863]: I1205 08:21:56.054157 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerStarted","Data":"09317c17e7497e1ad54b536ba90326b34b4f00d03844476a2da6f910ff27549c"} Dec 05 08:21:57 crc kubenswrapper[4863]: I1205 08:21:57.064984 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerStarted","Data":"17727eab2a537afc9885dfc2e58dd2e3eb9931f160b3dcb647f94cb9996d2c9f"} Dec 05 08:21:59 crc kubenswrapper[4863]: I1205 08:21:59.085969 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerStarted","Data":"9782caa23cb6fefa3a60d724ef54e39e9b53eecf06f2859094806bbd25d771c0"} Dec 05 08:21:59 crc kubenswrapper[4863]: I1205 08:21:59.087863 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ceilometer-0" Dec 05 08:21:59 crc kubenswrapper[4863]: I1205 08:21:59.122638 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.423813223 podStartE2EDuration="10.122619908s" podCreationTimestamp="2025-12-05 08:21:49 +0000 UTC" firstStartedPulling="2025-12-05 08:21:50.424459514 +0000 UTC m=+5738.150456554" lastFinishedPulling="2025-12-05 08:21:58.123266199 +0000 UTC m=+5745.849263239" observedRunningTime="2025-12-05 08:21:59.120985098 +0000 UTC m=+5746.846982138" watchObservedRunningTime="2025-12-05 08:21:59.122619908 +0000 UTC m=+5746.848616948" Dec 05 08:22:03 crc kubenswrapper[4863]: I1205 08:22:03.603153 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:22:03 crc kubenswrapper[4863]: E1205 08:22:03.604128 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.336679 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-ssf6l"] Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.338345 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.348953 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-ssf6l"] Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.416065 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-operator-scripts\") pod \"aodh-db-create-ssf6l\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.416186 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkdf7\" (UniqueName: \"kubernetes.io/projected/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-kube-api-access-mkdf7\") pod \"aodh-db-create-ssf6l\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.446606 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-3cac-account-create-update-bgjnk"] Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.448284 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.452280 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.457433 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-3cac-account-create-update-bgjnk"] Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.517967 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp2rv\" (UniqueName: \"kubernetes.io/projected/3ec2abf8-0166-4043-b0fa-e660b04dad5a-kube-api-access-bp2rv\") pod \"aodh-3cac-account-create-update-bgjnk\" (UID: \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.518108 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-operator-scripts\") pod \"aodh-db-create-ssf6l\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.518191 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkdf7\" (UniqueName: \"kubernetes.io/projected/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-kube-api-access-mkdf7\") pod \"aodh-db-create-ssf6l\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.518216 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ec2abf8-0166-4043-b0fa-e660b04dad5a-operator-scripts\") pod \"aodh-3cac-account-create-update-bgjnk\" (UID: \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.518845 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-operator-scripts\") pod \"aodh-db-create-ssf6l\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.536393 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkdf7\" (UniqueName: \"kubernetes.io/projected/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-kube-api-access-mkdf7\") pod \"aodh-db-create-ssf6l\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.620428 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ec2abf8-0166-4043-b0fa-e660b04dad5a-operator-scripts\") pod \"aodh-3cac-account-create-update-bgjnk\" (UID: \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.620581 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp2rv\" (UniqueName: \"kubernetes.io/projected/3ec2abf8-0166-4043-b0fa-e660b04dad5a-kube-api-access-bp2rv\") pod \"aodh-3cac-account-create-update-bgjnk\" (UID: 
\"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.621273 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ec2abf8-0166-4043-b0fa-e660b04dad5a-operator-scripts\") pod \"aodh-3cac-account-create-update-bgjnk\" (UID: \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.640542 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp2rv\" (UniqueName: \"kubernetes.io/projected/3ec2abf8-0166-4043-b0fa-e660b04dad5a-kube-api-access-bp2rv\") pod \"aodh-3cac-account-create-update-bgjnk\" (UID: \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.716088 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:06 crc kubenswrapper[4863]: I1205 08:22:06.779513 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:07 crc kubenswrapper[4863]: I1205 08:22:07.236648 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-ssf6l"] Dec 05 08:22:07 crc kubenswrapper[4863]: W1205 08:22:07.239189 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc0ebd98_9d3b_4a7d_8628_12fff50b1d7b.slice/crio-908ac00e492cdb17cac259cd90b25b50a61b4f6ee5c43f47f40ddca433bc90d3 WatchSource:0}: Error finding container 908ac00e492cdb17cac259cd90b25b50a61b4f6ee5c43f47f40ddca433bc90d3: Status 404 returned error can't find the container with id 908ac00e492cdb17cac259cd90b25b50a61b4f6ee5c43f47f40ddca433bc90d3 Dec 05 08:22:07 crc kubenswrapper[4863]: I1205 08:22:07.354993 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-3cac-account-create-update-bgjnk"] Dec 05 08:22:07 crc kubenswrapper[4863]: W1205 08:22:07.357073 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ec2abf8_0166_4043_b0fa_e660b04dad5a.slice/crio-221c5488549f8bb12812af6827d6c5450b941152415a946504e13029aa50e8fe WatchSource:0}: Error finding container 221c5488549f8bb12812af6827d6c5450b941152415a946504e13029aa50e8fe: Status 404 returned error can't find the container with id 221c5488549f8bb12812af6827d6c5450b941152415a946504e13029aa50e8fe Dec 05 08:22:08 crc kubenswrapper[4863]: I1205 08:22:08.184899 4863 generic.go:334] "Generic (PLEG): container finished" podID="dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b" containerID="f8dab3e5b8ac369f73c251aea0e0ee0b4a01b521921c4d222324b8caea3ef743" exitCode=0 Dec 05 08:22:08 crc kubenswrapper[4863]: I1205 08:22:08.185006 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-ssf6l" event={"ID":"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b","Type":"ContainerDied","Data":"f8dab3e5b8ac369f73c251aea0e0ee0b4a01b521921c4d222324b8caea3ef743"} Dec 05 08:22:08 crc kubenswrapper[4863]: I1205 08:22:08.185329 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-ssf6l" 
event={"ID":"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b","Type":"ContainerStarted","Data":"908ac00e492cdb17cac259cd90b25b50a61b4f6ee5c43f47f40ddca433bc90d3"} Dec 05 08:22:08 crc kubenswrapper[4863]: I1205 08:22:08.187275 4863 generic.go:334] "Generic (PLEG): container finished" podID="3ec2abf8-0166-4043-b0fa-e660b04dad5a" containerID="dd7baf5589147826871463ab8226b61e4c0422c79e8d8fd6bd7034afa299387d" exitCode=0 Dec 05 08:22:08 crc kubenswrapper[4863]: I1205 08:22:08.187299 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-3cac-account-create-update-bgjnk" event={"ID":"3ec2abf8-0166-4043-b0fa-e660b04dad5a","Type":"ContainerDied","Data":"dd7baf5589147826871463ab8226b61e4c0422c79e8d8fd6bd7034afa299387d"} Dec 05 08:22:08 crc kubenswrapper[4863]: I1205 08:22:08.187315 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-3cac-account-create-update-bgjnk" event={"ID":"3ec2abf8-0166-4043-b0fa-e660b04dad5a","Type":"ContainerStarted","Data":"221c5488549f8bb12812af6827d6c5450b941152415a946504e13029aa50e8fe"} Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.743230 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.752282 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.885599 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp2rv\" (UniqueName: \"kubernetes.io/projected/3ec2abf8-0166-4043-b0fa-e660b04dad5a-kube-api-access-bp2rv\") pod \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\" (UID: \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.885732 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ec2abf8-0166-4043-b0fa-e660b04dad5a-operator-scripts\") pod \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\" (UID: \"3ec2abf8-0166-4043-b0fa-e660b04dad5a\") " Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.885993 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-operator-scripts\") pod \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.886082 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkdf7\" (UniqueName: \"kubernetes.io/projected/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-kube-api-access-mkdf7\") pod \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\" (UID: \"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b\") " Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.886185 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ec2abf8-0166-4043-b0fa-e660b04dad5a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3ec2abf8-0166-4043-b0fa-e660b04dad5a" (UID: "3ec2abf8-0166-4043-b0fa-e660b04dad5a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.886727 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ec2abf8-0166-4043-b0fa-e660b04dad5a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.887158 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b" (UID: "dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.891973 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-kube-api-access-mkdf7" (OuterVolumeSpecName: "kube-api-access-mkdf7") pod "dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b" (UID: "dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b"). InnerVolumeSpecName "kube-api-access-mkdf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.892346 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ec2abf8-0166-4043-b0fa-e660b04dad5a-kube-api-access-bp2rv" (OuterVolumeSpecName: "kube-api-access-bp2rv") pod "3ec2abf8-0166-4043-b0fa-e660b04dad5a" (UID: "3ec2abf8-0166-4043-b0fa-e660b04dad5a"). InnerVolumeSpecName "kube-api-access-bp2rv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.989644 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.989688 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkdf7\" (UniqueName: \"kubernetes.io/projected/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b-kube-api-access-mkdf7\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:09 crc kubenswrapper[4863]: I1205 08:22:09.989703 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp2rv\" (UniqueName: \"kubernetes.io/projected/3ec2abf8-0166-4043-b0fa-e660b04dad5a-kube-api-access-bp2rv\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:10 crc kubenswrapper[4863]: I1205 08:22:10.213850 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-3cac-account-create-update-bgjnk" event={"ID":"3ec2abf8-0166-4043-b0fa-e660b04dad5a","Type":"ContainerDied","Data":"221c5488549f8bb12812af6827d6c5450b941152415a946504e13029aa50e8fe"} Dec 05 08:22:10 crc kubenswrapper[4863]: I1205 08:22:10.213898 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="221c5488549f8bb12812af6827d6c5450b941152415a946504e13029aa50e8fe" Dec 05 08:22:10 crc kubenswrapper[4863]: I1205 08:22:10.213894 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-3cac-account-create-update-bgjnk" Dec 05 08:22:10 crc kubenswrapper[4863]: I1205 08:22:10.215733 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-ssf6l" event={"ID":"dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b","Type":"ContainerDied","Data":"908ac00e492cdb17cac259cd90b25b50a61b4f6ee5c43f47f40ddca433bc90d3"} Dec 05 08:22:10 crc kubenswrapper[4863]: I1205 08:22:10.215761 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="908ac00e492cdb17cac259cd90b25b50a61b4f6ee5c43f47f40ddca433bc90d3" Dec 05 08:22:10 crc kubenswrapper[4863]: I1205 08:22:10.215815 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-ssf6l" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.047007 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-58a8-account-create-update-z2xr4"] Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.063861 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-792qb"] Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.074704 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-58a8-account-create-update-z2xr4"] Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.083434 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-792qb"] Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.723376 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-4plh9"] Dec 05 08:22:11 crc kubenswrapper[4863]: E1205 08:22:11.724313 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec2abf8-0166-4043-b0fa-e660b04dad5a" containerName="mariadb-account-create-update" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.724338 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec2abf8-0166-4043-b0fa-e660b04dad5a" containerName="mariadb-account-create-update" Dec 05 08:22:11 crc kubenswrapper[4863]: E1205 08:22:11.724360 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b" containerName="mariadb-database-create" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.724370 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b" containerName="mariadb-database-create" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.724673 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec2abf8-0166-4043-b0fa-e660b04dad5a" containerName="mariadb-account-create-update" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.724713 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b" containerName="mariadb-database-create" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.725680 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.727979 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-8nrxv" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.728692 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.729068 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.733155 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.734446 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-4plh9"] Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.824975 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtzf8\" (UniqueName: \"kubernetes.io/projected/3841bf0a-8d71-42a5-891f-211901e10a31-kube-api-access-xtzf8\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.825061 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-combined-ca-bundle\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.825096 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-config-data\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.825201 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-scripts\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.926701 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-scripts\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.926809 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtzf8\" (UniqueName: \"kubernetes.io/projected/3841bf0a-8d71-42a5-891f-211901e10a31-kube-api-access-xtzf8\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.926874 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-combined-ca-bundle\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc 
kubenswrapper[4863]: I1205 08:22:11.926915 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-config-data\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.934652 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-config-data\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.937284 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-combined-ca-bundle\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.946410 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-scripts\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:11 crc kubenswrapper[4863]: I1205 08:22:11.947875 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtzf8\" (UniqueName: \"kubernetes.io/projected/3841bf0a-8d71-42a5-891f-211901e10a31-kube-api-access-xtzf8\") pod \"aodh-db-sync-4plh9\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:12 crc kubenswrapper[4863]: I1205 08:22:12.045228 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:12 crc kubenswrapper[4863]: I1205 08:22:12.557538 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-4plh9"] Dec 05 08:22:12 crc kubenswrapper[4863]: I1205 08:22:12.625337 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32ebb64a-49f7-4c64-b02b-5a021b1738fb" path="/var/lib/kubelet/pods/32ebb64a-49f7-4c64-b02b-5a021b1738fb/volumes" Dec 05 08:22:12 crc kubenswrapper[4863]: I1205 08:22:12.626641 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="500d36fb-e8a7-4a4a-87ab-0deb8ee411a7" path="/var/lib/kubelet/pods/500d36fb-e8a7-4a4a-87ab-0deb8ee411a7/volumes" Dec 05 08:22:13 crc kubenswrapper[4863]: I1205 08:22:13.246528 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4plh9" event={"ID":"3841bf0a-8d71-42a5-891f-211901e10a31","Type":"ContainerStarted","Data":"4a55343065ccc0be1fb36758d3b58db37d28096adc0b45c225cfe4293333ccdc"} Dec 05 08:22:15 crc kubenswrapper[4863]: I1205 08:22:15.602100 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:22:15 crc kubenswrapper[4863]: E1205 08:22:15.603235 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:22:17 crc kubenswrapper[4863]: I1205 08:22:17.099190 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 08:22:18 crc kubenswrapper[4863]: I1205 08:22:18.297230 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4plh9" event={"ID":"3841bf0a-8d71-42a5-891f-211901e10a31","Type":"ContainerStarted","Data":"80f802e9f43c39eee411b3568993c3137e9433c414d1587f5cfe9e21b23fc35f"} Dec 05 08:22:18 crc kubenswrapper[4863]: I1205 08:22:18.319731 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-4plh9" podStartSLOduration=2.783510225 podStartE2EDuration="7.319707002s" podCreationTimestamp="2025-12-05 08:22:11 +0000 UTC" firstStartedPulling="2025-12-05 08:22:12.560364635 +0000 UTC m=+5760.286361665" lastFinishedPulling="2025-12-05 08:22:17.096561402 +0000 UTC m=+5764.822558442" observedRunningTime="2025-12-05 08:22:18.312834405 +0000 UTC m=+5766.038831445" watchObservedRunningTime="2025-12-05 08:22:18.319707002 +0000 UTC m=+5766.045704052" Dec 05 08:22:19 crc kubenswrapper[4863]: I1205 08:22:19.865751 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 08:22:20 crc kubenswrapper[4863]: I1205 08:22:20.317612 4863 generic.go:334] "Generic (PLEG): container finished" podID="3841bf0a-8d71-42a5-891f-211901e10a31" containerID="80f802e9f43c39eee411b3568993c3137e9433c414d1587f5cfe9e21b23fc35f" exitCode=0 Dec 05 08:22:20 crc kubenswrapper[4863]: I1205 08:22:20.317703 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4plh9" event={"ID":"3841bf0a-8d71-42a5-891f-211901e10a31","Type":"ContainerDied","Data":"80f802e9f43c39eee411b3568993c3137e9433c414d1587f5cfe9e21b23fc35f"} Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.861306 
4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.945042 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-combined-ca-bundle\") pod \"3841bf0a-8d71-42a5-891f-211901e10a31\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.945193 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-config-data\") pod \"3841bf0a-8d71-42a5-891f-211901e10a31\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.945343 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtzf8\" (UniqueName: \"kubernetes.io/projected/3841bf0a-8d71-42a5-891f-211901e10a31-kube-api-access-xtzf8\") pod \"3841bf0a-8d71-42a5-891f-211901e10a31\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.945432 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-scripts\") pod \"3841bf0a-8d71-42a5-891f-211901e10a31\" (UID: \"3841bf0a-8d71-42a5-891f-211901e10a31\") " Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.951302 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3841bf0a-8d71-42a5-891f-211901e10a31-kube-api-access-xtzf8" (OuterVolumeSpecName: "kube-api-access-xtzf8") pod "3841bf0a-8d71-42a5-891f-211901e10a31" (UID: "3841bf0a-8d71-42a5-891f-211901e10a31"). InnerVolumeSpecName "kube-api-access-xtzf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.951819 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-scripts" (OuterVolumeSpecName: "scripts") pod "3841bf0a-8d71-42a5-891f-211901e10a31" (UID: "3841bf0a-8d71-42a5-891f-211901e10a31"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.977656 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-config-data" (OuterVolumeSpecName: "config-data") pod "3841bf0a-8d71-42a5-891f-211901e10a31" (UID: "3841bf0a-8d71-42a5-891f-211901e10a31"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:21 crc kubenswrapper[4863]: I1205 08:22:21.980729 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3841bf0a-8d71-42a5-891f-211901e10a31" (UID: "3841bf0a-8d71-42a5-891f-211901e10a31"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.038629 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-mwppq"] Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.046709 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-mwppq"] Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.047802 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.047845 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtzf8\" (UniqueName: \"kubernetes.io/projected/3841bf0a-8d71-42a5-891f-211901e10a31-kube-api-access-xtzf8\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.047863 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.047875 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3841bf0a-8d71-42a5-891f-211901e10a31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.337287 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4plh9" event={"ID":"3841bf0a-8d71-42a5-891f-211901e10a31","Type":"ContainerDied","Data":"4a55343065ccc0be1fb36758d3b58db37d28096adc0b45c225cfe4293333ccdc"} Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.337328 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a55343065ccc0be1fb36758d3b58db37d28096adc0b45c225cfe4293333ccdc" Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.337338 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-4plh9" Dec 05 08:22:22 crc kubenswrapper[4863]: I1205 08:22:22.614719 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7107b12-f131-4b67-b3c2-34afcdc8dd67" path="/var/lib/kubelet/pods/d7107b12-f131-4b67-b3c2-34afcdc8dd67/volumes" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.817404 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 05 08:22:26 crc kubenswrapper[4863]: E1205 08:22:26.818648 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3841bf0a-8d71-42a5-891f-211901e10a31" containerName="aodh-db-sync" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.818665 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="3841bf0a-8d71-42a5-891f-211901e10a31" containerName="aodh-db-sync" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.818896 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="3841bf0a-8d71-42a5-891f-211901e10a31" containerName="aodh-db-sync" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.826516 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.834785 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.834979 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.835190 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-8nrxv" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.851958 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.954664 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-config-data\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.954721 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5mlq\" (UniqueName: \"kubernetes.io/projected/f665fbfc-2d49-498b-84f7-4f50900b8752-kube-api-access-f5mlq\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.954827 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:26 crc kubenswrapper[4863]: I1205 08:22:26.955026 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-scripts\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.064402 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-config-data\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.064797 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5mlq\" (UniqueName: \"kubernetes.io/projected/f665fbfc-2d49-498b-84f7-4f50900b8752-kube-api-access-f5mlq\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.064895 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.065367 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-scripts\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: 
I1205 08:22:27.079315 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.080114 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-scripts\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.086324 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f665fbfc-2d49-498b-84f7-4f50900b8752-config-data\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.089682 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5mlq\" (UniqueName: \"kubernetes.io/projected/f665fbfc-2d49-498b-84f7-4f50900b8752-kube-api-access-f5mlq\") pod \"aodh-0\" (UID: \"f665fbfc-2d49-498b-84f7-4f50900b8752\") " pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.147571 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 05 08:22:27 crc kubenswrapper[4863]: I1205 08:22:27.676024 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 05 08:22:27 crc kubenswrapper[4863]: W1205 08:22:27.677503 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf665fbfc_2d49_498b_84f7_4f50900b8752.slice/crio-8ffb195bb5b57cc38b5c65b64b88529c024cdc8207f79d04bcb3159418ed73c1 WatchSource:0}: Error finding container 8ffb195bb5b57cc38b5c65b64b88529c024cdc8207f79d04bcb3159418ed73c1: Status 404 returned error can't find the container with id 8ffb195bb5b57cc38b5c65b64b88529c024cdc8207f79d04bcb3159418ed73c1 Dec 05 08:22:28 crc kubenswrapper[4863]: I1205 08:22:28.393027 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f665fbfc-2d49-498b-84f7-4f50900b8752","Type":"ContainerStarted","Data":"8cb8ec672aa2b9f68b55b50c6aaa1f7cf30d1e11b2587efdf5484e3f967920e6"} Dec 05 08:22:28 crc kubenswrapper[4863]: I1205 08:22:28.393381 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f665fbfc-2d49-498b-84f7-4f50900b8752","Type":"ContainerStarted","Data":"8ffb195bb5b57cc38b5c65b64b88529c024cdc8207f79d04bcb3159418ed73c1"} Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.046693 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.047319 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-central-agent" containerID="cri-o://ace934af95b06d9e96209d9df7ba7578f5e48aa661e6550b21ace90356a4c9ca" gracePeriod=30 Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.047416 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="proxy-httpd" 
containerID="cri-o://9782caa23cb6fefa3a60d724ef54e39e9b53eecf06f2859094806bbd25d771c0" gracePeriod=30 Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.047427 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-notification-agent" containerID="cri-o://09317c17e7497e1ad54b536ba90326b34b4f00d03844476a2da6f910ff27549c" gracePeriod=30 Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.047390 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="sg-core" containerID="cri-o://17727eab2a537afc9885dfc2e58dd2e3eb9931f160b3dcb647f94cb9996d2c9f" gracePeriod=30 Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.405839 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f665fbfc-2d49-498b-84f7-4f50900b8752","Type":"ContainerStarted","Data":"8ce85523cace6ea1eeec229f033c5af77381d0daea91302f6fa6fa309ac0c203"} Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.409180 4863 generic.go:334] "Generic (PLEG): container finished" podID="faefd764-d947-483f-9848-81f5ea81afa1" containerID="9782caa23cb6fefa3a60d724ef54e39e9b53eecf06f2859094806bbd25d771c0" exitCode=0 Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.409213 4863 generic.go:334] "Generic (PLEG): container finished" podID="faefd764-d947-483f-9848-81f5ea81afa1" containerID="17727eab2a537afc9885dfc2e58dd2e3eb9931f160b3dcb647f94cb9996d2c9f" exitCode=2 Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.409234 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerDied","Data":"9782caa23cb6fefa3a60d724ef54e39e9b53eecf06f2859094806bbd25d771c0"} Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.409259 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerDied","Data":"17727eab2a537afc9885dfc2e58dd2e3eb9931f160b3dcb647f94cb9996d2c9f"} Dec 05 08:22:29 crc kubenswrapper[4863]: I1205 08:22:29.601725 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:22:29 crc kubenswrapper[4863]: E1205 08:22:29.601968 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:22:30 crc kubenswrapper[4863]: I1205 08:22:30.422591 4863 generic.go:334] "Generic (PLEG): container finished" podID="faefd764-d947-483f-9848-81f5ea81afa1" containerID="ace934af95b06d9e96209d9df7ba7578f5e48aa661e6550b21ace90356a4c9ca" exitCode=0 Dec 05 08:22:30 crc kubenswrapper[4863]: I1205 08:22:30.422700 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerDied","Data":"ace934af95b06d9e96209d9df7ba7578f5e48aa661e6550b21ace90356a4c9ca"} Dec 05 08:22:31 crc kubenswrapper[4863]: I1205 08:22:31.437750 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/aodh-0" event={"ID":"f665fbfc-2d49-498b-84f7-4f50900b8752","Type":"ContainerStarted","Data":"b6f83a45e2ce9844b02244ef0997aec04ac362ddb36f28e82bb87233f3c20b73"} Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.453213 4863 generic.go:334] "Generic (PLEG): container finished" podID="faefd764-d947-483f-9848-81f5ea81afa1" containerID="09317c17e7497e1ad54b536ba90326b34b4f00d03844476a2da6f910ff27549c" exitCode=0 Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.453300 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerDied","Data":"09317c17e7497e1ad54b536ba90326b34b4f00d03844476a2da6f910ff27549c"} Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.455990 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f665fbfc-2d49-498b-84f7-4f50900b8752","Type":"ContainerStarted","Data":"2171c000bf25b8a9ae03c7b0a59c4091fcbcaac7338d2ceb52d9438d11626ea1"} Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.498780 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.36735546 podStartE2EDuration="6.498758885s" podCreationTimestamp="2025-12-05 08:22:26 +0000 UTC" firstStartedPulling="2025-12-05 08:22:27.68077207 +0000 UTC m=+5775.406769110" lastFinishedPulling="2025-12-05 08:22:31.812175495 +0000 UTC m=+5779.538172535" observedRunningTime="2025-12-05 08:22:32.478686228 +0000 UTC m=+5780.204683268" watchObservedRunningTime="2025-12-05 08:22:32.498758885 +0000 UTC m=+5780.224755925" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.718689 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888296 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-run-httpd\") pod \"faefd764-d947-483f-9848-81f5ea81afa1\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888652 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-config-data\") pod \"faefd764-d947-483f-9848-81f5ea81afa1\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888689 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-scripts\") pod \"faefd764-d947-483f-9848-81f5ea81afa1\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888709 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77szz\" (UniqueName: \"kubernetes.io/projected/faefd764-d947-483f-9848-81f5ea81afa1-kube-api-access-77szz\") pod \"faefd764-d947-483f-9848-81f5ea81afa1\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888744 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-combined-ca-bundle\") pod \"faefd764-d947-483f-9848-81f5ea81afa1\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " Dec 
05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888780 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-log-httpd\") pod \"faefd764-d947-483f-9848-81f5ea81afa1\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888832 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-sg-core-conf-yaml\") pod \"faefd764-d947-483f-9848-81f5ea81afa1\" (UID: \"faefd764-d947-483f-9848-81f5ea81afa1\") " Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.888938 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "faefd764-d947-483f-9848-81f5ea81afa1" (UID: "faefd764-d947-483f-9848-81f5ea81afa1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.889355 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.889583 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "faefd764-d947-483f-9848-81f5ea81afa1" (UID: "faefd764-d947-483f-9848-81f5ea81afa1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.916799 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faefd764-d947-483f-9848-81f5ea81afa1-kube-api-access-77szz" (OuterVolumeSpecName: "kube-api-access-77szz") pod "faefd764-d947-483f-9848-81f5ea81afa1" (UID: "faefd764-d947-483f-9848-81f5ea81afa1"). InnerVolumeSpecName "kube-api-access-77szz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.921006 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-scripts" (OuterVolumeSpecName: "scripts") pod "faefd764-d947-483f-9848-81f5ea81afa1" (UID: "faefd764-d947-483f-9848-81f5ea81afa1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.935685 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "faefd764-d947-483f-9848-81f5ea81afa1" (UID: "faefd764-d947-483f-9848-81f5ea81afa1"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.992627 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.992662 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77szz\" (UniqueName: \"kubernetes.io/projected/faefd764-d947-483f-9848-81f5ea81afa1-kube-api-access-77szz\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.992675 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/faefd764-d947-483f-9848-81f5ea81afa1-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.992685 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:32 crc kubenswrapper[4863]: I1205 08:22:32.996870 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "faefd764-d947-483f-9848-81f5ea81afa1" (UID: "faefd764-d947-483f-9848-81f5ea81afa1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.031287 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-config-data" (OuterVolumeSpecName: "config-data") pod "faefd764-d947-483f-9848-81f5ea81afa1" (UID: "faefd764-d947-483f-9848-81f5ea81afa1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.094194 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.094230 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faefd764-d947-483f-9848-81f5ea81afa1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.467357 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.467350 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"faefd764-d947-483f-9848-81f5ea81afa1","Type":"ContainerDied","Data":"0427f23e156aab4c90be99d6126df9ec787866056b1f5d7a8eda135c8a75b037"} Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.467431 4863 scope.go:117] "RemoveContainer" containerID="9782caa23cb6fefa3a60d724ef54e39e9b53eecf06f2859094806bbd25d771c0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.578690 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.586345 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.609302 4863 scope.go:117] "RemoveContainer" containerID="17727eab2a537afc9885dfc2e58dd2e3eb9931f160b3dcb647f94cb9996d2c9f" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.654375 4863 scope.go:117] "RemoveContainer" containerID="09317c17e7497e1ad54b536ba90326b34b4f00d03844476a2da6f910ff27549c" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.678072 4863 scope.go:117] "RemoveContainer" containerID="ace934af95b06d9e96209d9df7ba7578f5e48aa661e6550b21ace90356a4c9ca" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.810583 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:22:33 crc kubenswrapper[4863]: E1205 08:22:33.811206 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="sg-core" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.811222 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="sg-core" Dec 05 08:22:33 crc kubenswrapper[4863]: E1205 08:22:33.811246 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-notification-agent" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.811253 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-notification-agent" Dec 05 08:22:33 crc kubenswrapper[4863]: E1205 08:22:33.811277 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="proxy-httpd" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.811282 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="proxy-httpd" Dec 05 08:22:33 crc kubenswrapper[4863]: E1205 08:22:33.811302 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-central-agent" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.811308 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-central-agent" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.811463 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-notification-agent" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.811493 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="sg-core" Dec 05 08:22:33 crc 
kubenswrapper[4863]: I1205 08:22:33.811518 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="proxy-httpd" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.811530 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="faefd764-d947-483f-9848-81f5ea81afa1" containerName="ceilometer-central-agent" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.813250 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.816517 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.818577 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.828637 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.915087 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-run-httpd\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.915144 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-scripts\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.915184 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.915342 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-config-data\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.915465 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-log-httpd\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.915557 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx7zk\" (UniqueName: \"kubernetes.io/projected/6b0ed59a-e113-4888-a808-62021a9999e0-kube-api-access-bx7zk\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:33 crc kubenswrapper[4863]: I1205 08:22:33.915598 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-combined-ca-bundle\") pod 
\"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017007 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx7zk\" (UniqueName: \"kubernetes.io/projected/6b0ed59a-e113-4888-a808-62021a9999e0-kube-api-access-bx7zk\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017057 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017192 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-run-httpd\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017219 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-scripts\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017245 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017278 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-config-data\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017309 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-log-httpd\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.017823 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-run-httpd\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.018845 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-log-httpd\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.027065 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.032711 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-scripts\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.032977 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx7zk\" (UniqueName: \"kubernetes.io/projected/6b0ed59a-e113-4888-a808-62021a9999e0-kube-api-access-bx7zk\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.036182 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.036352 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-config-data\") pod \"ceilometer-0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.136745 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.613106 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faefd764-d947-483f-9848-81f5ea81afa1" path="/var/lib/kubelet/pods/faefd764-d947-483f-9848-81f5ea81afa1/volumes" Dec 05 08:22:34 crc kubenswrapper[4863]: I1205 08:22:34.689955 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:22:35 crc kubenswrapper[4863]: I1205 08:22:35.494460 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerStarted","Data":"2827fcf4ce2f2d4ce6b508e2b7589b3bdde80708687b10ad5d564ec929ab2d9b"} Dec 05 08:22:35 crc kubenswrapper[4863]: I1205 08:22:35.495321 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerStarted","Data":"9c6097e3fcb75151fbbd7720b9a9f9a2d6d0d48c7df4e10bb453d40c8f4a84f7"} Dec 05 08:22:35 crc kubenswrapper[4863]: I1205 08:22:35.495336 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerStarted","Data":"5104edf58e4f8b4823f300399fe3b24e3a7f9ad33f328bc5e85d7a8d3458bf63"} Dec 05 08:22:36 crc kubenswrapper[4863]: I1205 08:22:36.504986 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerStarted","Data":"111abe362c7e7481fa57a744d9138f49e38f21dffc29419fd5fbd3d2219b64cd"} Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.379520 4863 scope.go:117] "RemoveContainer" containerID="ab5afe535043e2f2ea675a29ba5891fdce9e0bd333259f9f96d87994794ce3b8" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.408418 4863 scope.go:117] "RemoveContainer" 
containerID="cff358e6acff753e870f35a908dd4b26a5f2a6b845bd7837eacba3752eaeee5f" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.493909 4863 scope.go:117] "RemoveContainer" containerID="e12b4e2d7441e28e79b82c21835a39f9b0be725411b8a9ff2ec33c001f19759c" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.567724 4863 scope.go:117] "RemoveContainer" containerID="ea89759fce048ed640eaddf232aced0af6ec4897085d8235462ea878b8930c30" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.578653 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerStarted","Data":"e0b0b5599d028046cb04bee6033f2c2fbbd1c374d344ba52629732ea1f6c7c38"} Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.579868 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.644787 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.302393043 podStartE2EDuration="4.644771239s" podCreationTimestamp="2025-12-05 08:22:33 +0000 UTC" firstStartedPulling="2025-12-05 08:22:34.701843651 +0000 UTC m=+5782.427840691" lastFinishedPulling="2025-12-05 08:22:37.044221847 +0000 UTC m=+5784.770218887" observedRunningTime="2025-12-05 08:22:37.641798077 +0000 UTC m=+5785.367795117" watchObservedRunningTime="2025-12-05 08:22:37.644771239 +0000 UTC m=+5785.370768279" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.664561 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-64cnn"] Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.669263 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-64cnn" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.670698 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-64cnn"] Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.755935 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-6c01-account-create-update-ww5gs"] Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.757257 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.761818 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.779699 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-6c01-account-create-update-ww5gs"] Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.802622 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzhqd\" (UniqueName: \"kubernetes.io/projected/18b3fb9d-3132-4643-a98f-0aa97954c4a3-kube-api-access-bzhqd\") pod \"manila-db-create-64cnn\" (UID: \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " pod="openstack/manila-db-create-64cnn" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.802714 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18b3fb9d-3132-4643-a98f-0aa97954c4a3-operator-scripts\") pod \"manila-db-create-64cnn\" (UID: \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " pod="openstack/manila-db-create-64cnn" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.904772 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzhqd\" (UniqueName: \"kubernetes.io/projected/18b3fb9d-3132-4643-a98f-0aa97954c4a3-kube-api-access-bzhqd\") pod \"manila-db-create-64cnn\" (UID: \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " pod="openstack/manila-db-create-64cnn" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.904838 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/664dfe89-e4fa-4fe2-97d7-187905492583-operator-scripts\") pod \"manila-6c01-account-create-update-ww5gs\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.904890 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqfmt\" (UniqueName: \"kubernetes.io/projected/664dfe89-e4fa-4fe2-97d7-187905492583-kube-api-access-jqfmt\") pod \"manila-6c01-account-create-update-ww5gs\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.904970 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18b3fb9d-3132-4643-a98f-0aa97954c4a3-operator-scripts\") pod \"manila-db-create-64cnn\" (UID: \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " pod="openstack/manila-db-create-64cnn" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.905869 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18b3fb9d-3132-4643-a98f-0aa97954c4a3-operator-scripts\") pod \"manila-db-create-64cnn\" (UID: \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " pod="openstack/manila-db-create-64cnn" Dec 05 08:22:37 crc kubenswrapper[4863]: I1205 08:22:37.924517 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzhqd\" (UniqueName: \"kubernetes.io/projected/18b3fb9d-3132-4643-a98f-0aa97954c4a3-kube-api-access-bzhqd\") pod \"manila-db-create-64cnn\" (UID: 
\"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " pod="openstack/manila-db-create-64cnn" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.006817 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/664dfe89-e4fa-4fe2-97d7-187905492583-operator-scripts\") pod \"manila-6c01-account-create-update-ww5gs\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.006875 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqfmt\" (UniqueName: \"kubernetes.io/projected/664dfe89-e4fa-4fe2-97d7-187905492583-kube-api-access-jqfmt\") pod \"manila-6c01-account-create-update-ww5gs\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.007673 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/664dfe89-e4fa-4fe2-97d7-187905492583-operator-scripts\") pod \"manila-6c01-account-create-update-ww5gs\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.008021 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7mbzm"] Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.009992 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.011296 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-64cnn" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.024053 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqfmt\" (UniqueName: \"kubernetes.io/projected/664dfe89-e4fa-4fe2-97d7-187905492583-kube-api-access-jqfmt\") pod \"manila-6c01-account-create-update-ww5gs\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.028622 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mbzm"] Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.102903 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.110744 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-catalog-content\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.110869 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-utilities\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.110928 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzq5x\" (UniqueName: \"kubernetes.io/projected/aaf5b7b4-4918-43d5-a002-48420c2728b2-kube-api-access-mzq5x\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.213726 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-utilities\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.213806 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzq5x\" (UniqueName: \"kubernetes.io/projected/aaf5b7b4-4918-43d5-a002-48420c2728b2-kube-api-access-mzq5x\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.213917 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-catalog-content\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.214563 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-catalog-content\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.232804 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-utilities\") pod \"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.236225 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzq5x\" (UniqueName: \"kubernetes.io/projected/aaf5b7b4-4918-43d5-a002-48420c2728b2-kube-api-access-mzq5x\") pod 
\"certified-operators-7mbzm\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.336973 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.606074 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-64cnn"] Dec 05 08:22:38 crc kubenswrapper[4863]: W1205 08:22:38.612730 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18b3fb9d_3132_4643_a98f_0aa97954c4a3.slice/crio-9e60974d89982e7a950cf1582ab5d8590c30d8bc5a4abcbaa026d851daf968ed WatchSource:0}: Error finding container 9e60974d89982e7a950cf1582ab5d8590c30d8bc5a4abcbaa026d851daf968ed: Status 404 returned error can't find the container with id 9e60974d89982e7a950cf1582ab5d8590c30d8bc5a4abcbaa026d851daf968ed Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.731689 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-6c01-account-create-update-ww5gs"] Dec 05 08:22:38 crc kubenswrapper[4863]: W1205 08:22:38.733053 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod664dfe89_e4fa_4fe2_97d7_187905492583.slice/crio-0baf1e7fcfabc139bb607ec80f06501ded9e82cd30d343c71e8b95655f5e5899 WatchSource:0}: Error finding container 0baf1e7fcfabc139bb607ec80f06501ded9e82cd30d343c71e8b95655f5e5899: Status 404 returned error can't find the container with id 0baf1e7fcfabc139bb607ec80f06501ded9e82cd30d343c71e8b95655f5e5899 Dec 05 08:22:38 crc kubenswrapper[4863]: I1205 08:22:38.946344 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mbzm"] Dec 05 08:22:38 crc kubenswrapper[4863]: W1205 08:22:38.947664 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaaf5b7b4_4918_43d5_a002_48420c2728b2.slice/crio-ca25489ac0fbc4fd0f1bde768abc1f16f46f1d3f320c1aaac08730adeac77cec WatchSource:0}: Error finding container ca25489ac0fbc4fd0f1bde768abc1f16f46f1d3f320c1aaac08730adeac77cec: Status 404 returned error can't find the container with id ca25489ac0fbc4fd0f1bde768abc1f16f46f1d3f320c1aaac08730adeac77cec Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.610366 4863 generic.go:334] "Generic (PLEG): container finished" podID="18b3fb9d-3132-4643-a98f-0aa97954c4a3" containerID="f26048754260f1f5216dd44300dcd107aaead2c9d8266cb4483e047b677ae4d8" exitCode=0 Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.610604 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-64cnn" event={"ID":"18b3fb9d-3132-4643-a98f-0aa97954c4a3","Type":"ContainerDied","Data":"f26048754260f1f5216dd44300dcd107aaead2c9d8266cb4483e047b677ae4d8"} Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.610789 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-64cnn" event={"ID":"18b3fb9d-3132-4643-a98f-0aa97954c4a3","Type":"ContainerStarted","Data":"9e60974d89982e7a950cf1582ab5d8590c30d8bc5a4abcbaa026d851daf968ed"} Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.616111 4863 generic.go:334] "Generic (PLEG): container finished" podID="664dfe89-e4fa-4fe2-97d7-187905492583" 
containerID="62c4f78fda6c98b1c748c3059677a4cb951368c7b7058ad9d25996ff4342c963" exitCode=0 Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.616215 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-6c01-account-create-update-ww5gs" event={"ID":"664dfe89-e4fa-4fe2-97d7-187905492583","Type":"ContainerDied","Data":"62c4f78fda6c98b1c748c3059677a4cb951368c7b7058ad9d25996ff4342c963"} Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.616251 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-6c01-account-create-update-ww5gs" event={"ID":"664dfe89-e4fa-4fe2-97d7-187905492583","Type":"ContainerStarted","Data":"0baf1e7fcfabc139bb607ec80f06501ded9e82cd30d343c71e8b95655f5e5899"} Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.631222 4863 generic.go:334] "Generic (PLEG): container finished" podID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerID="5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb" exitCode=0 Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.631278 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mbzm" event={"ID":"aaf5b7b4-4918-43d5-a002-48420c2728b2","Type":"ContainerDied","Data":"5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb"} Dec 05 08:22:39 crc kubenswrapper[4863]: I1205 08:22:39.631308 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mbzm" event={"ID":"aaf5b7b4-4918-43d5-a002-48420c2728b2","Type":"ContainerStarted","Data":"ca25489ac0fbc4fd0f1bde768abc1f16f46f1d3f320c1aaac08730adeac77cec"} Dec 05 08:22:40 crc kubenswrapper[4863]: I1205 08:22:40.642157 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mbzm" event={"ID":"aaf5b7b4-4918-43d5-a002-48420c2728b2","Type":"ContainerStarted","Data":"2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876"} Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.197805 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-64cnn" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.206236 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.296722 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqfmt\" (UniqueName: \"kubernetes.io/projected/664dfe89-e4fa-4fe2-97d7-187905492583-kube-api-access-jqfmt\") pod \"664dfe89-e4fa-4fe2-97d7-187905492583\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.296901 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18b3fb9d-3132-4643-a98f-0aa97954c4a3-operator-scripts\") pod \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\" (UID: \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.296955 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzhqd\" (UniqueName: \"kubernetes.io/projected/18b3fb9d-3132-4643-a98f-0aa97954c4a3-kube-api-access-bzhqd\") pod \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\" (UID: \"18b3fb9d-3132-4643-a98f-0aa97954c4a3\") " Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.297012 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/664dfe89-e4fa-4fe2-97d7-187905492583-operator-scripts\") pod \"664dfe89-e4fa-4fe2-97d7-187905492583\" (UID: \"664dfe89-e4fa-4fe2-97d7-187905492583\") " Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.298281 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/664dfe89-e4fa-4fe2-97d7-187905492583-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "664dfe89-e4fa-4fe2-97d7-187905492583" (UID: "664dfe89-e4fa-4fe2-97d7-187905492583"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.299761 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18b3fb9d-3132-4643-a98f-0aa97954c4a3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "18b3fb9d-3132-4643-a98f-0aa97954c4a3" (UID: "18b3fb9d-3132-4643-a98f-0aa97954c4a3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.305024 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/664dfe89-e4fa-4fe2-97d7-187905492583-kube-api-access-jqfmt" (OuterVolumeSpecName: "kube-api-access-jqfmt") pod "664dfe89-e4fa-4fe2-97d7-187905492583" (UID: "664dfe89-e4fa-4fe2-97d7-187905492583"). InnerVolumeSpecName "kube-api-access-jqfmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.306114 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18b3fb9d-3132-4643-a98f-0aa97954c4a3-kube-api-access-bzhqd" (OuterVolumeSpecName: "kube-api-access-bzhqd") pod "18b3fb9d-3132-4643-a98f-0aa97954c4a3" (UID: "18b3fb9d-3132-4643-a98f-0aa97954c4a3"). InnerVolumeSpecName "kube-api-access-bzhqd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.400360 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqfmt\" (UniqueName: \"kubernetes.io/projected/664dfe89-e4fa-4fe2-97d7-187905492583-kube-api-access-jqfmt\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.400432 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18b3fb9d-3132-4643-a98f-0aa97954c4a3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.400444 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzhqd\" (UniqueName: \"kubernetes.io/projected/18b3fb9d-3132-4643-a98f-0aa97954c4a3-kube-api-access-bzhqd\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.400458 4863 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/664dfe89-e4fa-4fe2-97d7-187905492583-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.601978 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:22:41 crc kubenswrapper[4863]: E1205 08:22:41.602808 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.652721 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-6c01-account-create-update-ww5gs" event={"ID":"664dfe89-e4fa-4fe2-97d7-187905492583","Type":"ContainerDied","Data":"0baf1e7fcfabc139bb607ec80f06501ded9e82cd30d343c71e8b95655f5e5899"} Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.652749 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-6c01-account-create-update-ww5gs" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.652758 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0baf1e7fcfabc139bb607ec80f06501ded9e82cd30d343c71e8b95655f5e5899" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.654311 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-64cnn" event={"ID":"18b3fb9d-3132-4643-a98f-0aa97954c4a3","Type":"ContainerDied","Data":"9e60974d89982e7a950cf1582ab5d8590c30d8bc5a4abcbaa026d851daf968ed"} Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.654357 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e60974d89982e7a950cf1582ab5d8590c30d8bc5a4abcbaa026d851daf968ed" Dec 05 08:22:41 crc kubenswrapper[4863]: I1205 08:22:41.654339 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-64cnn" Dec 05 08:22:42 crc kubenswrapper[4863]: I1205 08:22:42.688241 4863 generic.go:334] "Generic (PLEG): container finished" podID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerID="2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876" exitCode=0 Dec 05 08:22:42 crc kubenswrapper[4863]: I1205 08:22:42.688505 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mbzm" event={"ID":"aaf5b7b4-4918-43d5-a002-48420c2728b2","Type":"ContainerDied","Data":"2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876"} Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.026565 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-kbgj9"] Dec 05 08:22:43 crc kubenswrapper[4863]: E1205 08:22:43.027047 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18b3fb9d-3132-4643-a98f-0aa97954c4a3" containerName="mariadb-database-create" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.027073 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="18b3fb9d-3132-4643-a98f-0aa97954c4a3" containerName="mariadb-database-create" Dec 05 08:22:43 crc kubenswrapper[4863]: E1205 08:22:43.027095 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664dfe89-e4fa-4fe2-97d7-187905492583" containerName="mariadb-account-create-update" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.027104 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="664dfe89-e4fa-4fe2-97d7-187905492583" containerName="mariadb-account-create-update" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.027344 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="664dfe89-e4fa-4fe2-97d7-187905492583" containerName="mariadb-account-create-update" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.027371 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="18b3fb9d-3132-4643-a98f-0aa97954c4a3" containerName="mariadb-database-create" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.028323 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.040768 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-kbgj9"] Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.077963 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.078148 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-4t6dn" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.145772 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-job-config-data\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.146138 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-combined-ca-bundle\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.146176 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22kqq\" (UniqueName: \"kubernetes.io/projected/664a0a78-9cd2-4632-9287-b09fac9bae5d-kube-api-access-22kqq\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.146442 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-config-data\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.248628 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-config-data\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.248845 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-job-config-data\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.248883 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-combined-ca-bundle\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.248942 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22kqq\" (UniqueName: \"kubernetes.io/projected/664a0a78-9cd2-4632-9287-b09fac9bae5d-kube-api-access-22kqq\") pod \"manila-db-sync-kbgj9\" (UID: 
\"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.256209 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-config-data\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.256445 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-job-config-data\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.265578 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-combined-ca-bundle\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.280838 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22kqq\" (UniqueName: \"kubernetes.io/projected/664a0a78-9cd2-4632-9287-b09fac9bae5d-kube-api-access-22kqq\") pod \"manila-db-sync-kbgj9\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.414795 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.737675 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mbzm" event={"ID":"aaf5b7b4-4918-43d5-a002-48420c2728b2","Type":"ContainerStarted","Data":"bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d"} Dec 05 08:22:43 crc kubenswrapper[4863]: I1205 08:22:43.762721 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7mbzm" podStartSLOduration=3.193605566 podStartE2EDuration="6.762701217s" podCreationTimestamp="2025-12-05 08:22:37 +0000 UTC" firstStartedPulling="2025-12-05 08:22:39.633681839 +0000 UTC m=+5787.359678879" lastFinishedPulling="2025-12-05 08:22:43.20277749 +0000 UTC m=+5790.928774530" observedRunningTime="2025-12-05 08:22:43.755813829 +0000 UTC m=+5791.481810869" watchObservedRunningTime="2025-12-05 08:22:43.762701217 +0000 UTC m=+5791.488698257" Dec 05 08:22:44 crc kubenswrapper[4863]: I1205 08:22:44.631734 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-kbgj9"] Dec 05 08:22:44 crc kubenswrapper[4863]: W1205 08:22:44.635796 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod664a0a78_9cd2_4632_9287_b09fac9bae5d.slice/crio-ff5acb9e93354d8d301206fedff7b2131fcbdf4f0f5f11cb8bfb52ce11e92abc WatchSource:0}: Error finding container ff5acb9e93354d8d301206fedff7b2131fcbdf4f0f5f11cb8bfb52ce11e92abc: Status 404 returned error can't find the container with id ff5acb9e93354d8d301206fedff7b2131fcbdf4f0f5f11cb8bfb52ce11e92abc Dec 05 08:22:44 crc kubenswrapper[4863]: I1205 08:22:44.749771 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-kbgj9" 
event={"ID":"664a0a78-9cd2-4632-9287-b09fac9bae5d","Type":"ContainerStarted","Data":"ff5acb9e93354d8d301206fedff7b2131fcbdf4f0f5f11cb8bfb52ce11e92abc"} Dec 05 08:22:48 crc kubenswrapper[4863]: I1205 08:22:48.338101 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:48 crc kubenswrapper[4863]: I1205 08:22:48.338648 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:48 crc kubenswrapper[4863]: I1205 08:22:48.389275 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:48 crc kubenswrapper[4863]: I1205 08:22:48.840464 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:48 crc kubenswrapper[4863]: I1205 08:22:48.897920 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7mbzm"] Dec 05 08:22:50 crc kubenswrapper[4863]: I1205 08:22:50.810636 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-kbgj9" event={"ID":"664a0a78-9cd2-4632-9287-b09fac9bae5d","Type":"ContainerStarted","Data":"bb4892b646a6577478a336670d2ed45e054f1ccd53f7f9931b5d17e5d4883061"} Dec 05 08:22:50 crc kubenswrapper[4863]: I1205 08:22:50.810784 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7mbzm" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerName="registry-server" containerID="cri-o://bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d" gracePeriod=2 Dec 05 08:22:50 crc kubenswrapper[4863]: I1205 08:22:50.849751 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-kbgj9" podStartSLOduration=3.307372536 podStartE2EDuration="8.849724347s" podCreationTimestamp="2025-12-05 08:22:42 +0000 UTC" firstStartedPulling="2025-12-05 08:22:44.637838711 +0000 UTC m=+5792.363835751" lastFinishedPulling="2025-12-05 08:22:50.180190522 +0000 UTC m=+5797.906187562" observedRunningTime="2025-12-05 08:22:50.829538598 +0000 UTC m=+5798.555535648" watchObservedRunningTime="2025-12-05 08:22:50.849724347 +0000 UTC m=+5798.575721377" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.285961 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.475049 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzq5x\" (UniqueName: \"kubernetes.io/projected/aaf5b7b4-4918-43d5-a002-48420c2728b2-kube-api-access-mzq5x\") pod \"aaf5b7b4-4918-43d5-a002-48420c2728b2\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.475204 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-catalog-content\") pod \"aaf5b7b4-4918-43d5-a002-48420c2728b2\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.475387 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-utilities\") pod \"aaf5b7b4-4918-43d5-a002-48420c2728b2\" (UID: \"aaf5b7b4-4918-43d5-a002-48420c2728b2\") " Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.476726 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-utilities" (OuterVolumeSpecName: "utilities") pod "aaf5b7b4-4918-43d5-a002-48420c2728b2" (UID: "aaf5b7b4-4918-43d5-a002-48420c2728b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.482478 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaf5b7b4-4918-43d5-a002-48420c2728b2-kube-api-access-mzq5x" (OuterVolumeSpecName: "kube-api-access-mzq5x") pod "aaf5b7b4-4918-43d5-a002-48420c2728b2" (UID: "aaf5b7b4-4918-43d5-a002-48420c2728b2"). InnerVolumeSpecName "kube-api-access-mzq5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.525966 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aaf5b7b4-4918-43d5-a002-48420c2728b2" (UID: "aaf5b7b4-4918-43d5-a002-48420c2728b2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.578319 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.578362 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf5b7b4-4918-43d5-a002-48420c2728b2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.578379 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzq5x\" (UniqueName: \"kubernetes.io/projected/aaf5b7b4-4918-43d5-a002-48420c2728b2-kube-api-access-mzq5x\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.820934 4863 generic.go:334] "Generic (PLEG): container finished" podID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerID="bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d" exitCode=0 Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.820999 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mbzm" event={"ID":"aaf5b7b4-4918-43d5-a002-48420c2728b2","Type":"ContainerDied","Data":"bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d"} Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.821081 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mbzm" event={"ID":"aaf5b7b4-4918-43d5-a002-48420c2728b2","Type":"ContainerDied","Data":"ca25489ac0fbc4fd0f1bde768abc1f16f46f1d3f320c1aaac08730adeac77cec"} Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.821107 4863 scope.go:117] "RemoveContainer" containerID="bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.821122 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7mbzm" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.863031 4863 scope.go:117] "RemoveContainer" containerID="2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.888453 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7mbzm"] Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.906693 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7mbzm"] Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.912008 4863 scope.go:117] "RemoveContainer" containerID="5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.989659 4863 scope.go:117] "RemoveContainer" containerID="bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d" Dec 05 08:22:51 crc kubenswrapper[4863]: E1205 08:22:51.990657 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d\": container with ID starting with bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d not found: ID does not exist" containerID="bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.990707 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d"} err="failed to get container status \"bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d\": rpc error: code = NotFound desc = could not find container \"bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d\": container with ID starting with bad05f1eb5cfaf19415c3a61e1eb651aff4b4aafcd10f13ef7de0b6604a7f60d not found: ID does not exist" Dec 05 08:22:51 crc kubenswrapper[4863]: I1205 08:22:51.990732 4863 scope.go:117] "RemoveContainer" containerID="2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876" Dec 05 08:22:52 crc kubenswrapper[4863]: E1205 08:22:52.000940 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876\": container with ID starting with 2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876 not found: ID does not exist" containerID="2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876" Dec 05 08:22:52 crc kubenswrapper[4863]: I1205 08:22:52.001033 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876"} err="failed to get container status \"2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876\": rpc error: code = NotFound desc = could not find container \"2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876\": container with ID starting with 2ff16721c832e2738977b3f358f96185851bf78442516301b5483f525f579876 not found: ID does not exist" Dec 05 08:22:52 crc kubenswrapper[4863]: I1205 08:22:52.001097 4863 scope.go:117] "RemoveContainer" containerID="5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb" Dec 05 08:22:52 crc kubenswrapper[4863]: E1205 08:22:52.001575 4863 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb\": container with ID starting with 5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb not found: ID does not exist" containerID="5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb" Dec 05 08:22:52 crc kubenswrapper[4863]: I1205 08:22:52.001628 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb"} err="failed to get container status \"5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb\": rpc error: code = NotFound desc = could not find container \"5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb\": container with ID starting with 5aea457417009e5118a12d11b418e15940597d112e233440d7ff11ca03b8d8bb not found: ID does not exist" Dec 05 08:22:52 crc kubenswrapper[4863]: I1205 08:22:52.612203 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" path="/var/lib/kubelet/pods/aaf5b7b4-4918-43d5-a002-48420c2728b2/volumes" Dec 05 08:22:52 crc kubenswrapper[4863]: I1205 08:22:52.830050 4863 generic.go:334] "Generic (PLEG): container finished" podID="664a0a78-9cd2-4632-9287-b09fac9bae5d" containerID="bb4892b646a6577478a336670d2ed45e054f1ccd53f7f9931b5d17e5d4883061" exitCode=0 Dec 05 08:22:52 crc kubenswrapper[4863]: I1205 08:22:52.830116 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-kbgj9" event={"ID":"664a0a78-9cd2-4632-9287-b09fac9bae5d","Type":"ContainerDied","Data":"bb4892b646a6577478a336670d2ed45e054f1ccd53f7f9931b5d17e5d4883061"} Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.374204 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.537836 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22kqq\" (UniqueName: \"kubernetes.io/projected/664a0a78-9cd2-4632-9287-b09fac9bae5d-kube-api-access-22kqq\") pod \"664a0a78-9cd2-4632-9287-b09fac9bae5d\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.539040 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-config-data\") pod \"664a0a78-9cd2-4632-9287-b09fac9bae5d\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.539145 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-job-config-data\") pod \"664a0a78-9cd2-4632-9287-b09fac9bae5d\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.539228 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-combined-ca-bundle\") pod \"664a0a78-9cd2-4632-9287-b09fac9bae5d\" (UID: \"664a0a78-9cd2-4632-9287-b09fac9bae5d\") " Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.543617 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "664a0a78-9cd2-4632-9287-b09fac9bae5d" (UID: "664a0a78-9cd2-4632-9287-b09fac9bae5d"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.545074 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/664a0a78-9cd2-4632-9287-b09fac9bae5d-kube-api-access-22kqq" (OuterVolumeSpecName: "kube-api-access-22kqq") pod "664a0a78-9cd2-4632-9287-b09fac9bae5d" (UID: "664a0a78-9cd2-4632-9287-b09fac9bae5d"). InnerVolumeSpecName "kube-api-access-22kqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.546992 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-config-data" (OuterVolumeSpecName: "config-data") pod "664a0a78-9cd2-4632-9287-b09fac9bae5d" (UID: "664a0a78-9cd2-4632-9287-b09fac9bae5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.582402 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "664a0a78-9cd2-4632-9287-b09fac9bae5d" (UID: "664a0a78-9cd2-4632-9287-b09fac9bae5d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.642605 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22kqq\" (UniqueName: \"kubernetes.io/projected/664a0a78-9cd2-4632-9287-b09fac9bae5d-kube-api-access-22kqq\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.642653 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.642667 4863 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.642681 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664a0a78-9cd2-4632-9287-b09fac9bae5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.858018 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-kbgj9" event={"ID":"664a0a78-9cd2-4632-9287-b09fac9bae5d","Type":"ContainerDied","Data":"ff5acb9e93354d8d301206fedff7b2131fcbdf4f0f5f11cb8bfb52ce11e92abc"} Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.858078 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff5acb9e93354d8d301206fedff7b2131fcbdf4f0f5f11cb8bfb52ce11e92abc" Dec 05 08:22:54 crc kubenswrapper[4863]: I1205 08:22:54.858095 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-kbgj9" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.342552 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 08:22:55 crc kubenswrapper[4863]: E1205 08:22:55.343315 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerName="extract-content" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.343327 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerName="extract-content" Dec 05 08:22:55 crc kubenswrapper[4863]: E1205 08:22:55.343338 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664a0a78-9cd2-4632-9287-b09fac9bae5d" containerName="manila-db-sync" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.343344 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="664a0a78-9cd2-4632-9287-b09fac9bae5d" containerName="manila-db-sync" Dec 05 08:22:55 crc kubenswrapper[4863]: E1205 08:22:55.343358 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerName="extract-utilities" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.343364 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerName="extract-utilities" Dec 05 08:22:55 crc kubenswrapper[4863]: E1205 08:22:55.343376 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerName="registry-server" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.343381 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" 
containerName="registry-server" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.343638 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaf5b7b4-4918-43d5-a002-48420c2728b2" containerName="registry-server" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.343650 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="664a0a78-9cd2-4632-9287-b09fac9bae5d" containerName="manila-db-sync" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.345301 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.360038 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.360357 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364620 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-4t6dn" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364620 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364675 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-scripts\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364736 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364829 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/4b64b9d9-479f-40e8-9ae2-82f00263fe59-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364883 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-config-data\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364913 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b64b9d9-479f-40e8-9ae2-82f00263fe59-ceph\") pod \"manila-share-share1-0\" (UID: 
\"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364928 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b64b9d9-479f-40e8-9ae2-82f00263fe59-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.364948 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpvn8\" (UniqueName: \"kubernetes.io/projected/4b64b9d9-479f-40e8-9ae2-82f00263fe59-kube-api-access-kpvn8\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.371153 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.379872 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.390589 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.413220 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.447459 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.466699 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.466939 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-scripts\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467034 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467113 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75vxf\" (UniqueName: \"kubernetes.io/projected/79867749-f1a2-4945-bc5a-8f58300da928-kube-api-access-75vxf\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467222 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-scripts\") pod \"manila-scheduler-0\" (UID: 
\"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467313 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/4b64b9d9-479f-40e8-9ae2-82f00263fe59-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467386 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467482 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-config-data\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467594 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b64b9d9-479f-40e8-9ae2-82f00263fe59-ceph\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467665 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b64b9d9-479f-40e8-9ae2-82f00263fe59-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467746 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpvn8\" (UniqueName: \"kubernetes.io/projected/4b64b9d9-479f-40e8-9ae2-82f00263fe59-kube-api-access-kpvn8\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.467893 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.476679 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-config-data\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.476876 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79867749-f1a2-4945-bc5a-8f58300da928-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.471541 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b64b9d9-479f-40e8-9ae2-82f00263fe59-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.468423 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-696f884c85-qct82"] Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.468845 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/4b64b9d9-479f-40e8-9ae2-82f00263fe59-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.478640 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.479238 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.483167 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-scripts\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.484448 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.489657 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b64b9d9-479f-40e8-9ae2-82f00263fe59-ceph\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.499239 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b64b9d9-479f-40e8-9ae2-82f00263fe59-config-data\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.503370 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpvn8\" (UniqueName: \"kubernetes.io/projected/4b64b9d9-479f-40e8-9ae2-82f00263fe59-kube-api-access-kpvn8\") pod \"manila-share-share1-0\" (UID: \"4b64b9d9-479f-40e8-9ae2-82f00263fe59\") " pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.576427 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-696f884c85-qct82"] Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.582617 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-nb\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.600646 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.600706 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-dns-svc\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.600751 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-config-data\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.600791 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-config\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.600928 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79867749-f1a2-4945-bc5a-8f58300da928-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.601019 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-sb\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.601114 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.601147 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h62b\" (UniqueName: \"kubernetes.io/projected/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-kube-api-access-7h62b\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.601220 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75vxf\" (UniqueName: 
\"kubernetes.io/projected/79867749-f1a2-4945-bc5a-8f58300da928-kube-api-access-75vxf\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.601326 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-scripts\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.607935 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79867749-f1a2-4945-bc5a-8f58300da928-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.619268 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-config-data\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.622529 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.636196 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.671932 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79867749-f1a2-4945-bc5a-8f58300da928-scripts\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.672227 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75vxf\" (UniqueName: \"kubernetes.io/projected/79867749-f1a2-4945-bc5a-8f58300da928-kube-api-access-75vxf\") pod \"manila-scheduler-0\" (UID: \"79867749-f1a2-4945-bc5a-8f58300da928\") " pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.677554 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.679330 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.690057 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:22:55 crc kubenswrapper[4863]: E1205 08:22:55.690318 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.691317 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.702836 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-sb\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.702921 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h62b\" (UniqueName: \"kubernetes.io/projected/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-kube-api-access-7h62b\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.703240 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-nb\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.703317 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-dns-svc\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.703362 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-config\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.705063 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-sb\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.705823 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-nb\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " 
pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.706134 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-dns-svc\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.706769 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-config\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.709921 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.715719 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.763169 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h62b\" (UniqueName: \"kubernetes.io/projected/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-kube-api-access-7h62b\") pod \"dnsmasq-dns-696f884c85-qct82\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.768993 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.805737 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-scripts\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.805776 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.805843 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-config-data\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.805863 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff86e47b-ffeb-4075-a297-28f8096416c8-etc-machine-id\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.805885 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff86e47b-ffeb-4075-a297-28f8096416c8-logs\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: 
I1205 08:22:55.805929 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vk6v\" (UniqueName: \"kubernetes.io/projected/ff86e47b-ffeb-4075-a297-28f8096416c8-kube-api-access-5vk6v\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.806013 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-config-data-custom\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.913968 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vk6v\" (UniqueName: \"kubernetes.io/projected/ff86e47b-ffeb-4075-a297-28f8096416c8-kube-api-access-5vk6v\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.914066 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-config-data-custom\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.914124 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-scripts\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.914146 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.914189 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff86e47b-ffeb-4075-a297-28f8096416c8-etc-machine-id\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.914203 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-config-data\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.914225 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff86e47b-ffeb-4075-a297-28f8096416c8-logs\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.914735 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff86e47b-ffeb-4075-a297-28f8096416c8-logs\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 
crc kubenswrapper[4863]: I1205 08:22:55.917040 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff86e47b-ffeb-4075-a297-28f8096416c8-etc-machine-id\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.921350 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-config-data-custom\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.924149 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-config-data\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.941352 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-scripts\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.942056 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff86e47b-ffeb-4075-a297-28f8096416c8-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:55 crc kubenswrapper[4863]: I1205 08:22:55.950521 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vk6v\" (UniqueName: \"kubernetes.io/projected/ff86e47b-ffeb-4075-a297-28f8096416c8-kube-api-access-5vk6v\") pod \"manila-api-0\" (UID: \"ff86e47b-ffeb-4075-a297-28f8096416c8\") " pod="openstack/manila-api-0" Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.077853 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.110465 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.523042 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.531398 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.629945 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.823771 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-696f884c85-qct82"] Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.890572 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"4b64b9d9-479f-40e8-9ae2-82f00263fe59","Type":"ContainerStarted","Data":"f5c75053bf0dba4b109d327452f83f2b7487452b2a753dffb39510036dc98c8e"} Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.892925 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"79867749-f1a2-4945-bc5a-8f58300da928","Type":"ContainerStarted","Data":"bbfc9533bd5b147e46fa737f9b9720cb04127d29b73e7db910f0966339c8016b"} Dec 05 08:22:56 crc kubenswrapper[4863]: I1205 08:22:56.902923 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 08:22:57 crc kubenswrapper[4863]: I1205 08:22:57.913269 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"79867749-f1a2-4945-bc5a-8f58300da928","Type":"ContainerStarted","Data":"8bc275641cde81a1fadbfe07034bae2875b2d615af67bc446ab6616e55f8b39c"} Dec 05 08:22:57 crc kubenswrapper[4863]: I1205 08:22:57.918648 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"ff86e47b-ffeb-4075-a297-28f8096416c8","Type":"ContainerStarted","Data":"3a07cbb19e1ff4d1fe4e1b25b881fede81ff06ed69def41fe95cf493f8640cef"} Dec 05 08:22:57 crc kubenswrapper[4863]: I1205 08:22:57.918698 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"ff86e47b-ffeb-4075-a297-28f8096416c8","Type":"ContainerStarted","Data":"9088a397289b8a6a03d4166bdd5ee203691c9b1b11b1461aeabd06967aeb9714"} Dec 05 08:22:57 crc kubenswrapper[4863]: I1205 08:22:57.920930 4863 generic.go:334] "Generic (PLEG): container finished" podID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerID="cd7d2b454cc0aa83daf3bcc42865ee4acb303f5a24bb363916c587f40acdc4e3" exitCode=0 Dec 05 08:22:57 crc kubenswrapper[4863]: I1205 08:22:57.920964 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-696f884c85-qct82" event={"ID":"d8c5e45d-85b6-484b-800c-45e5abd6fcd3","Type":"ContainerDied","Data":"cd7d2b454cc0aa83daf3bcc42865ee4acb303f5a24bb363916c587f40acdc4e3"} Dec 05 08:22:57 crc kubenswrapper[4863]: I1205 08:22:57.920987 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-696f884c85-qct82" event={"ID":"d8c5e45d-85b6-484b-800c-45e5abd6fcd3","Type":"ContainerStarted","Data":"584d1c21a733a76e0c3813471f2a5d754062bac20bcdba906592dee73a39c12e"} Dec 05 08:22:58 crc kubenswrapper[4863]: I1205 08:22:58.932554 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"ff86e47b-ffeb-4075-a297-28f8096416c8","Type":"ContainerStarted","Data":"663c168147b2307a7478203cad79dc9632d06423d7e49504fa00aaa21ba4cd9b"} Dec 05 08:22:58 
crc kubenswrapper[4863]: I1205 08:22:58.933131 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 05 08:22:58 crc kubenswrapper[4863]: I1205 08:22:58.937765 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-696f884c85-qct82" event={"ID":"d8c5e45d-85b6-484b-800c-45e5abd6fcd3","Type":"ContainerStarted","Data":"73a91fbd214d25ebe9f32b2591b9bc2fbd46150b4420bd69691756ea4503fda2"} Dec 05 08:22:58 crc kubenswrapper[4863]: I1205 08:22:58.938661 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:22:58 crc kubenswrapper[4863]: I1205 08:22:58.943719 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"79867749-f1a2-4945-bc5a-8f58300da928","Type":"ContainerStarted","Data":"098b41ac8325fa54bb8a4a5309321deddc4027b44e93667a24635d67d2c667e4"} Dec 05 08:22:58 crc kubenswrapper[4863]: I1205 08:22:58.959805 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.959789863 podStartE2EDuration="3.959789863s" podCreationTimestamp="2025-12-05 08:22:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:58.959039345 +0000 UTC m=+5806.685036375" watchObservedRunningTime="2025-12-05 08:22:58.959789863 +0000 UTC m=+5806.685786903" Dec 05 08:22:58 crc kubenswrapper[4863]: I1205 08:22:58.995982 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-696f884c85-qct82" podStartSLOduration=3.995957611 podStartE2EDuration="3.995957611s" podCreationTimestamp="2025-12-05 08:22:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:58.979013789 +0000 UTC m=+5806.705010829" watchObservedRunningTime="2025-12-05 08:22:58.995957611 +0000 UTC m=+5806.721954651" Dec 05 08:22:59 crc kubenswrapper[4863]: I1205 08:22:59.003102 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.558274931 podStartE2EDuration="4.003085564s" podCreationTimestamp="2025-12-05 08:22:55 +0000 UTC" firstStartedPulling="2025-12-05 08:22:56.522788551 +0000 UTC m=+5804.248785591" lastFinishedPulling="2025-12-05 08:22:56.967599194 +0000 UTC m=+5804.693596224" observedRunningTime="2025-12-05 08:22:58.998628855 +0000 UTC m=+5806.724625895" watchObservedRunningTime="2025-12-05 08:22:59.003085564 +0000 UTC m=+5806.729082604" Dec 05 08:23:04 crc kubenswrapper[4863]: I1205 08:23:04.141804 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 08:23:05 crc kubenswrapper[4863]: I1205 08:23:05.769727 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Dec 05 08:23:06 crc kubenswrapper[4863]: I1205 08:23:06.079721 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:23:06 crc kubenswrapper[4863]: I1205 08:23:06.178643 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-db886c5f9-4wqdt"] Dec 05 08:23:06 crc kubenswrapper[4863]: I1205 08:23:06.178918 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" 
podUID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerName="dnsmasq-dns" containerID="cri-o://717b46cbcaad57f87a99aa506b9db547cf39166ec9324df12b8d44a4d9021091" gracePeriod=10 Dec 05 08:23:06 crc kubenswrapper[4863]: I1205 08:23:06.602368 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:23:06 crc kubenswrapper[4863]: E1205 08:23:06.602982 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.033534 4863 generic.go:334] "Generic (PLEG): container finished" podID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerID="717b46cbcaad57f87a99aa506b9db547cf39166ec9324df12b8d44a4d9021091" exitCode=0 Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.033783 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" event={"ID":"35ea1b31-9203-4ce1-9846-a27fb891bed8","Type":"ContainerDied","Data":"717b46cbcaad57f87a99aa506b9db547cf39166ec9324df12b8d44a4d9021091"} Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.292047 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.461541 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-dns-svc\") pod \"35ea1b31-9203-4ce1-9846-a27fb891bed8\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.461657 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-nb\") pod \"35ea1b31-9203-4ce1-9846-a27fb891bed8\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.461725 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-sb\") pod \"35ea1b31-9203-4ce1-9846-a27fb891bed8\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.461780 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-config\") pod \"35ea1b31-9203-4ce1-9846-a27fb891bed8\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.461890 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmqz4\" (UniqueName: \"kubernetes.io/projected/35ea1b31-9203-4ce1-9846-a27fb891bed8-kube-api-access-dmqz4\") pod \"35ea1b31-9203-4ce1-9846-a27fb891bed8\" (UID: \"35ea1b31-9203-4ce1-9846-a27fb891bed8\") " Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.468198 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/35ea1b31-9203-4ce1-9846-a27fb891bed8-kube-api-access-dmqz4" (OuterVolumeSpecName: "kube-api-access-dmqz4") pod "35ea1b31-9203-4ce1-9846-a27fb891bed8" (UID: "35ea1b31-9203-4ce1-9846-a27fb891bed8"). InnerVolumeSpecName "kube-api-access-dmqz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.523463 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-config" (OuterVolumeSpecName: "config") pod "35ea1b31-9203-4ce1-9846-a27fb891bed8" (UID: "35ea1b31-9203-4ce1-9846-a27fb891bed8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.527446 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "35ea1b31-9203-4ce1-9846-a27fb891bed8" (UID: "35ea1b31-9203-4ce1-9846-a27fb891bed8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.527515 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "35ea1b31-9203-4ce1-9846-a27fb891bed8" (UID: "35ea1b31-9203-4ce1-9846-a27fb891bed8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.551748 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "35ea1b31-9203-4ce1-9846-a27fb891bed8" (UID: "35ea1b31-9203-4ce1-9846-a27fb891bed8"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.564063 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.564096 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.564105 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.564114 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35ea1b31-9203-4ce1-9846-a27fb891bed8-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:07 crc kubenswrapper[4863]: I1205 08:23:07.564128 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmqz4\" (UniqueName: \"kubernetes.io/projected/35ea1b31-9203-4ce1-9846-a27fb891bed8-kube-api-access-dmqz4\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.044778 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"4b64b9d9-479f-40e8-9ae2-82f00263fe59","Type":"ContainerStarted","Data":"1eca9dd8be432083041043064d9336eda336f47a8e019010045e21559cf2f5bc"} Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.045967 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"4b64b9d9-479f-40e8-9ae2-82f00263fe59","Type":"ContainerStarted","Data":"990d925337bdcb629773f8ad07d7dc395b4cd7256741ba9b74f77d50460ab6a7"} Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.063721 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" event={"ID":"35ea1b31-9203-4ce1-9846-a27fb891bed8","Type":"ContainerDied","Data":"94cb01dd33ee5b7a3beaded3914ceacf1abc494c63953436801b208bb3c17e5c"} Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.063768 4863 scope.go:117] "RemoveContainer" containerID="717b46cbcaad57f87a99aa506b9db547cf39166ec9324df12b8d44a4d9021091" Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.063861 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-db886c5f9-4wqdt" Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.089968 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=2.673895311 podStartE2EDuration="13.089947179s" podCreationTimestamp="2025-12-05 08:22:55 +0000 UTC" firstStartedPulling="2025-12-05 08:22:56.609108385 +0000 UTC m=+5804.335105425" lastFinishedPulling="2025-12-05 08:23:07.025160263 +0000 UTC m=+5814.751157293" observedRunningTime="2025-12-05 08:23:08.079793512 +0000 UTC m=+5815.805790552" watchObservedRunningTime="2025-12-05 08:23:08.089947179 +0000 UTC m=+5815.815944219" Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.092949 4863 scope.go:117] "RemoveContainer" containerID="2531af9f8fa9dae1bd0196eae7f61ecb7d589aeb855f9c505aa8a6bfee5b6cdb" Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.111121 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-db886c5f9-4wqdt"] Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.120173 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-db886c5f9-4wqdt"] Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.570951 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.571266 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-central-agent" containerID="cri-o://9c6097e3fcb75151fbbd7720b9a9f9a2d6d0d48c7df4e10bb453d40c8f4a84f7" gracePeriod=30 Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.571345 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="proxy-httpd" containerID="cri-o://e0b0b5599d028046cb04bee6033f2c2fbbd1c374d344ba52629732ea1f6c7c38" gracePeriod=30 Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.571404 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="sg-core" containerID="cri-o://111abe362c7e7481fa57a744d9138f49e38f21dffc29419fd5fbd3d2219b64cd" gracePeriod=30 Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.571454 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-notification-agent" containerID="cri-o://2827fcf4ce2f2d4ce6b508e2b7589b3bdde80708687b10ad5d564ec929ab2d9b" gracePeriod=30 Dec 05 08:23:08 crc kubenswrapper[4863]: I1205 08:23:08.613106 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35ea1b31-9203-4ce1-9846-a27fb891bed8" path="/var/lib/kubelet/pods/35ea1b31-9203-4ce1-9846-a27fb891bed8/volumes" Dec 05 08:23:09 crc kubenswrapper[4863]: I1205 08:23:09.075212 4863 generic.go:334] "Generic (PLEG): container finished" podID="6b0ed59a-e113-4888-a808-62021a9999e0" containerID="e0b0b5599d028046cb04bee6033f2c2fbbd1c374d344ba52629732ea1f6c7c38" exitCode=0 Dec 05 08:23:09 crc kubenswrapper[4863]: I1205 08:23:09.075902 4863 generic.go:334] "Generic (PLEG): container finished" podID="6b0ed59a-e113-4888-a808-62021a9999e0" containerID="111abe362c7e7481fa57a744d9138f49e38f21dffc29419fd5fbd3d2219b64cd" exitCode=2 Dec 05 08:23:09 crc kubenswrapper[4863]: I1205 
08:23:09.075974 4863 generic.go:334] "Generic (PLEG): container finished" podID="6b0ed59a-e113-4888-a808-62021a9999e0" containerID="9c6097e3fcb75151fbbd7720b9a9f9a2d6d0d48c7df4e10bb453d40c8f4a84f7" exitCode=0 Dec 05 08:23:09 crc kubenswrapper[4863]: I1205 08:23:09.075283 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerDied","Data":"e0b0b5599d028046cb04bee6033f2c2fbbd1c374d344ba52629732ea1f6c7c38"} Dec 05 08:23:09 crc kubenswrapper[4863]: I1205 08:23:09.076139 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerDied","Data":"111abe362c7e7481fa57a744d9138f49e38f21dffc29419fd5fbd3d2219b64cd"} Dec 05 08:23:09 crc kubenswrapper[4863]: I1205 08:23:09.076205 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerDied","Data":"9c6097e3fcb75151fbbd7720b9a9f9a2d6d0d48c7df4e10bb453d40c8f4a84f7"} Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.140944 4863 generic.go:334] "Generic (PLEG): container finished" podID="6b0ed59a-e113-4888-a808-62021a9999e0" containerID="2827fcf4ce2f2d4ce6b508e2b7589b3bdde80708687b10ad5d564ec929ab2d9b" exitCode=0 Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.141173 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerDied","Data":"2827fcf4ce2f2d4ce6b508e2b7589b3bdde80708687b10ad5d564ec929ab2d9b"} Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.379672 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.492073 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-sg-core-conf-yaml\") pod \"6b0ed59a-e113-4888-a808-62021a9999e0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.492163 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-scripts\") pod \"6b0ed59a-e113-4888-a808-62021a9999e0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.492278 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-run-httpd\") pod \"6b0ed59a-e113-4888-a808-62021a9999e0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.492306 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-log-httpd\") pod \"6b0ed59a-e113-4888-a808-62021a9999e0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.492374 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-config-data\") pod \"6b0ed59a-e113-4888-a808-62021a9999e0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " Dec 
05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.492450 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bx7zk\" (UniqueName: \"kubernetes.io/projected/6b0ed59a-e113-4888-a808-62021a9999e0-kube-api-access-bx7zk\") pod \"6b0ed59a-e113-4888-a808-62021a9999e0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.492553 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-combined-ca-bundle\") pod \"6b0ed59a-e113-4888-a808-62021a9999e0\" (UID: \"6b0ed59a-e113-4888-a808-62021a9999e0\") " Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.493056 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6b0ed59a-e113-4888-a808-62021a9999e0" (UID: "6b0ed59a-e113-4888-a808-62021a9999e0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.493093 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6b0ed59a-e113-4888-a808-62021a9999e0" (UID: "6b0ed59a-e113-4888-a808-62021a9999e0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.497862 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-scripts" (OuterVolumeSpecName: "scripts") pod "6b0ed59a-e113-4888-a808-62021a9999e0" (UID: "6b0ed59a-e113-4888-a808-62021a9999e0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.511843 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b0ed59a-e113-4888-a808-62021a9999e0-kube-api-access-bx7zk" (OuterVolumeSpecName: "kube-api-access-bx7zk") pod "6b0ed59a-e113-4888-a808-62021a9999e0" (UID: "6b0ed59a-e113-4888-a808-62021a9999e0"). InnerVolumeSpecName "kube-api-access-bx7zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.554843 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6b0ed59a-e113-4888-a808-62021a9999e0" (UID: "6b0ed59a-e113-4888-a808-62021a9999e0"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.596997 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bx7zk\" (UniqueName: \"kubernetes.io/projected/6b0ed59a-e113-4888-a808-62021a9999e0-kube-api-access-bx7zk\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.597034 4863 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.597049 4863 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.597060 4863 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.597071 4863 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b0ed59a-e113-4888-a808-62021a9999e0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.605699 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b0ed59a-e113-4888-a808-62021a9999e0" (UID: "6b0ed59a-e113-4888-a808-62021a9999e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.657242 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-config-data" (OuterVolumeSpecName: "config-data") pod "6b0ed59a-e113-4888-a808-62021a9999e0" (UID: "6b0ed59a-e113-4888-a808-62021a9999e0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.699387 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:13 crc kubenswrapper[4863]: I1205 08:23:13.699434 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ed59a-e113-4888-a808-62021a9999e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.158799 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b0ed59a-e113-4888-a808-62021a9999e0","Type":"ContainerDied","Data":"5104edf58e4f8b4823f300399fe3b24e3a7f9ad33f328bc5e85d7a8d3458bf63"} Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.158888 4863 scope.go:117] "RemoveContainer" containerID="e0b0b5599d028046cb04bee6033f2c2fbbd1c374d344ba52629732ea1f6c7c38" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.159140 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.202561 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.204217 4863 scope.go:117] "RemoveContainer" containerID="111abe362c7e7481fa57a744d9138f49e38f21dffc29419fd5fbd3d2219b64cd" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.207857 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.236636 4863 scope.go:117] "RemoveContainer" containerID="2827fcf4ce2f2d4ce6b508e2b7589b3bdde80708687b10ad5d564ec929ab2d9b" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.246987 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:23:14 crc kubenswrapper[4863]: E1205 08:23:14.247534 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-central-agent" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247559 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-central-agent" Dec 05 08:23:14 crc kubenswrapper[4863]: E1205 08:23:14.247579 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="proxy-httpd" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247585 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="proxy-httpd" Dec 05 08:23:14 crc kubenswrapper[4863]: E1205 08:23:14.247602 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="sg-core" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247608 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="sg-core" Dec 05 08:23:14 crc kubenswrapper[4863]: E1205 08:23:14.247619 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-notification-agent" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247624 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-notification-agent" Dec 05 08:23:14 crc kubenswrapper[4863]: E1205 08:23:14.247634 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerName="init" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247641 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerName="init" Dec 05 08:23:14 crc kubenswrapper[4863]: E1205 08:23:14.247657 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerName="dnsmasq-dns" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247663 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerName="dnsmasq-dns" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247841 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="35ea1b31-9203-4ce1-9846-a27fb891bed8" containerName="dnsmasq-dns" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247866 4863 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="proxy-httpd" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247876 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="sg-core" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247886 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-notification-agent" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.247898 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" containerName="ceilometer-central-agent" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.249824 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.255805 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.256619 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.256838 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.257874 4863 scope.go:117] "RemoveContainer" containerID="9c6097e3fcb75151fbbd7720b9a9f9a2d6d0d48c7df4e10bb453d40c8f4a84f7" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.310451 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e91d28d-aeed-46b7-9579-327651062fb6-log-httpd\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.310521 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gsts\" (UniqueName: \"kubernetes.io/projected/0e91d28d-aeed-46b7-9579-327651062fb6-kube-api-access-2gsts\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.310563 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.310877 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e91d28d-aeed-46b7-9579-327651062fb6-run-httpd\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.310967 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-config-data\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.311071 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-scripts\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.311161 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.412900 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-scripts\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.412967 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.413049 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e91d28d-aeed-46b7-9579-327651062fb6-log-httpd\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.413075 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gsts\" (UniqueName: \"kubernetes.io/projected/0e91d28d-aeed-46b7-9579-327651062fb6-kube-api-access-2gsts\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.413093 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.413184 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e91d28d-aeed-46b7-9579-327651062fb6-run-httpd\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.413222 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-config-data\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.413728 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e91d28d-aeed-46b7-9579-327651062fb6-log-httpd\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.413971 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0e91d28d-aeed-46b7-9579-327651062fb6-run-httpd\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.421412 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-scripts\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.427121 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.427237 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.429460 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gsts\" (UniqueName: \"kubernetes.io/projected/0e91d28d-aeed-46b7-9579-327651062fb6-kube-api-access-2gsts\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.429496 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e91d28d-aeed-46b7-9579-327651062fb6-config-data\") pod \"ceilometer-0\" (UID: \"0e91d28d-aeed-46b7-9579-327651062fb6\") " pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.565806 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:23:14 crc kubenswrapper[4863]: I1205 08:23:14.627835 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b0ed59a-e113-4888-a808-62021a9999e0" path="/var/lib/kubelet/pods/6b0ed59a-e113-4888-a808-62021a9999e0/volumes" Dec 05 08:23:15 crc kubenswrapper[4863]: I1205 08:23:15.142144 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:23:15 crc kubenswrapper[4863]: I1205 08:23:15.169697 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e91d28d-aeed-46b7-9579-327651062fb6","Type":"ContainerStarted","Data":"70fbaa8516b2c3510c725190de477fd3545c95ca4618cad5486019d236a45c7b"} Dec 05 08:23:15 crc kubenswrapper[4863]: I1205 08:23:15.716748 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 05 08:23:16 crc kubenswrapper[4863]: I1205 08:23:16.185856 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e91d28d-aeed-46b7-9579-327651062fb6","Type":"ContainerStarted","Data":"38cad0e75f44fa87f1d346536be41a4a986b24e43f22ba69313e151366993b72"} Dec 05 08:23:16 crc kubenswrapper[4863]: I1205 08:23:16.185898 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e91d28d-aeed-46b7-9579-327651062fb6","Type":"ContainerStarted","Data":"56dace947acbec27a8524a0df0342c2af054b63450b01f3eb2b0fc5f9473ae14"} Dec 05 08:23:17 crc kubenswrapper[4863]: I1205 08:23:17.195823 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e91d28d-aeed-46b7-9579-327651062fb6","Type":"ContainerStarted","Data":"ccf2fab040f139d34bb2b48cc0d242d4562ad89c02e68bc7ecb5e0a29ff1c340"} Dec 05 08:23:17 crc kubenswrapper[4863]: I1205 08:23:17.472622 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 05 08:23:17 crc kubenswrapper[4863]: I1205 08:23:17.748512 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Dec 05 08:23:18 crc kubenswrapper[4863]: I1205 08:23:18.207231 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e91d28d-aeed-46b7-9579-327651062fb6","Type":"ContainerStarted","Data":"27f4952625f147e1cae2c3b2e90f72d72861538f2f8fb479b5142b6f1cd6b638"} Dec 05 08:23:18 crc kubenswrapper[4863]: I1205 08:23:18.207544 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 08:23:18 crc kubenswrapper[4863]: I1205 08:23:18.240082 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.605773634 podStartE2EDuration="4.240055363s" podCreationTimestamp="2025-12-05 08:23:14 +0000 UTC" firstStartedPulling="2025-12-05 08:23:15.150043246 +0000 UTC m=+5822.876040286" lastFinishedPulling="2025-12-05 08:23:17.784324975 +0000 UTC m=+5825.510322015" observedRunningTime="2025-12-05 08:23:18.2341391 +0000 UTC m=+5825.960136140" watchObservedRunningTime="2025-12-05 08:23:18.240055363 +0000 UTC m=+5825.966052413" Dec 05 08:23:19 crc kubenswrapper[4863]: I1205 08:23:19.043105 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-2tshl"] Dec 05 08:23:19 crc kubenswrapper[4863]: I1205 08:23:19.053684 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-2tshl"] Dec 
05 08:23:19 crc kubenswrapper[4863]: I1205 08:23:19.066419 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-9xr4l"] Dec 05 08:23:19 crc kubenswrapper[4863]: I1205 08:23:19.079523 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-07b0-account-create-update-x2zbm"] Dec 05 08:23:19 crc kubenswrapper[4863]: I1205 08:23:19.089883 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-9xr4l"] Dec 05 08:23:19 crc kubenswrapper[4863]: I1205 08:23:19.097971 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-07b0-account-create-update-x2zbm"] Dec 05 08:23:19 crc kubenswrapper[4863]: I1205 08:23:19.602537 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:23:19 crc kubenswrapper[4863]: E1205 08:23:19.604172 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.035622 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-w7pv2"] Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.046312 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-8a5f-account-create-update-jpqxj"] Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.059723 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-8aba-account-create-update-vznb6"] Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.069493 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-w7pv2"] Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.077551 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-8a5f-account-create-update-jpqxj"] Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.085627 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-8aba-account-create-update-vznb6"] Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.616321 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05f557d8-488b-401a-9827-6210361e103e" path="/var/lib/kubelet/pods/05f557d8-488b-401a-9827-6210361e103e/volumes" Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.617638 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22059a65-1c16-4835-af0c-791ed85bd701" path="/var/lib/kubelet/pods/22059a65-1c16-4835-af0c-791ed85bd701/volumes" Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.620458 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5" path="/var/lib/kubelet/pods/4ad4b25a-49e0-4cb2-825c-3e1b9b0caae5/volumes" Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.621859 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7adaea6c-32b1-4fb5-898e-d735014b35ef" path="/var/lib/kubelet/pods/7adaea6c-32b1-4fb5-898e-d735014b35ef/volumes" Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.623355 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="894c218c-8862-4e20-b273-302d259e3964" path="/var/lib/kubelet/pods/894c218c-8862-4e20-b273-302d259e3964/volumes" Dec 05 08:23:20 crc kubenswrapper[4863]: I1205 08:23:20.624615 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c25172c1-e63a-450c-beea-bfe6097c1168" path="/var/lib/kubelet/pods/c25172c1-e63a-450c-beea-bfe6097c1168/volumes" Dec 05 08:23:27 crc kubenswrapper[4863]: I1205 08:23:27.461095 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Dec 05 08:23:34 crc kubenswrapper[4863]: I1205 08:23:34.601949 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:23:34 crc kubenswrapper[4863]: E1205 08:23:34.602922 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:23:37 crc kubenswrapper[4863]: I1205 08:23:37.812898 4863 scope.go:117] "RemoveContainer" containerID="0d730b1858339edb2a3035434a38d8b8dcaa638acb8183a47bf84a4d4b2ee0dd" Dec 05 08:23:37 crc kubenswrapper[4863]: I1205 08:23:37.837517 4863 scope.go:117] "RemoveContainer" containerID="0bca5de5abc083cd5717ec445b2b41e59a1efa151fcf1f73a71d5ca329c8b47e" Dec 05 08:23:37 crc kubenswrapper[4863]: I1205 08:23:37.892520 4863 scope.go:117] "RemoveContainer" containerID="74bb233a30890e9c078e9c47922ade2f0273e91f06c4c8fbf2a5de884ef66059" Dec 05 08:23:37 crc kubenswrapper[4863]: I1205 08:23:37.937733 4863 scope.go:117] "RemoveContainer" containerID="c17d6f9870388d661caf3c9dd879347966564b06bddf7c4098ac043dea61482f" Dec 05 08:23:37 crc kubenswrapper[4863]: I1205 08:23:37.984194 4863 scope.go:117] "RemoveContainer" containerID="d9a46913d3b5a23f296e05cd9a538d8af277316c57dc1fcbc10f7782d873a45c" Dec 05 08:23:38 crc kubenswrapper[4863]: I1205 08:23:38.045518 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-czmxf"] Dec 05 08:23:38 crc kubenswrapper[4863]: I1205 08:23:38.052291 4863 scope.go:117] "RemoveContainer" containerID="df7d5da99c4df9017102581407ad900bb4158494a70e5dce32fc160578235232" Dec 05 08:23:38 crc kubenswrapper[4863]: I1205 08:23:38.058823 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-czmxf"] Dec 05 08:23:38 crc kubenswrapper[4863]: I1205 08:23:38.614794 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="236039b4-23cb-4694-a2f2-e6c5c8b10215" path="/var/lib/kubelet/pods/236039b4-23cb-4694-a2f2-e6c5c8b10215/volumes" Dec 05 08:23:44 crc kubenswrapper[4863]: I1205 08:23:44.572073 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 08:23:47 crc kubenswrapper[4863]: I1205 08:23:47.601990 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:23:47 crc kubenswrapper[4863]: E1205 08:23:47.602640 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:23:56 crc kubenswrapper[4863]: I1205 08:23:56.047631 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vbsxf"] Dec 05 08:23:56 crc kubenswrapper[4863]: I1205 08:23:56.056836 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vbsxf"] Dec 05 08:23:56 crc kubenswrapper[4863]: I1205 08:23:56.631217 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f3a7e74-9e74-480a-9232-a7b789c218d6" path="/var/lib/kubelet/pods/9f3a7e74-9e74-480a-9232-a7b789c218d6/volumes" Dec 05 08:23:58 crc kubenswrapper[4863]: I1205 08:23:58.032089 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-trgdq"] Dec 05 08:23:58 crc kubenswrapper[4863]: I1205 08:23:58.042993 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-trgdq"] Dec 05 08:23:58 crc kubenswrapper[4863]: I1205 08:23:58.613894 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e60035b0-2cb5-4329-973f-8ff053e9a3b3" path="/var/lib/kubelet/pods/e60035b0-2cb5-4329-973f-8ff053e9a3b3/volumes" Dec 05 08:24:00 crc kubenswrapper[4863]: I1205 08:24:00.602644 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:24:00 crc kubenswrapper[4863]: E1205 08:24:00.604514 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:24:03 crc kubenswrapper[4863]: I1205 08:24:03.979631 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bz9hd"] Dec 05 08:24:03 crc kubenswrapper[4863]: I1205 08:24:03.983071 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.010292 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bz9hd"] Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.156436 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-utilities\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.156569 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-catalog-content\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.156717 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wg9p\" (UniqueName: \"kubernetes.io/projected/5d6deebd-c8b3-46e1-9626-b635a9302784-kube-api-access-2wg9p\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.258210 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wg9p\" (UniqueName: \"kubernetes.io/projected/5d6deebd-c8b3-46e1-9626-b635a9302784-kube-api-access-2wg9p\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.258682 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-utilities\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.259096 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-utilities\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.259163 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-catalog-content\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.259390 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-catalog-content\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.284808 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2wg9p\" (UniqueName: \"kubernetes.io/projected/5d6deebd-c8b3-46e1-9626-b635a9302784-kube-api-access-2wg9p\") pod \"community-operators-bz9hd\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.304983 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:04 crc kubenswrapper[4863]: W1205 08:24:04.766353 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d6deebd_c8b3_46e1_9626_b635a9302784.slice/crio-f4eb46d2d7c8b33b623b2c8c2af8097f92add39a4080edf26a1fda6803782002 WatchSource:0}: Error finding container f4eb46d2d7c8b33b623b2c8c2af8097f92add39a4080edf26a1fda6803782002: Status 404 returned error can't find the container with id f4eb46d2d7c8b33b623b2c8c2af8097f92add39a4080edf26a1fda6803782002 Dec 05 08:24:04 crc kubenswrapper[4863]: I1205 08:24:04.766441 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bz9hd"] Dec 05 08:24:05 crc kubenswrapper[4863]: I1205 08:24:05.713098 4863 generic.go:334] "Generic (PLEG): container finished" podID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerID="3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68" exitCode=0 Dec 05 08:24:05 crc kubenswrapper[4863]: I1205 08:24:05.713429 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bz9hd" event={"ID":"5d6deebd-c8b3-46e1-9626-b635a9302784","Type":"ContainerDied","Data":"3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68"} Dec 05 08:24:05 crc kubenswrapper[4863]: I1205 08:24:05.713555 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bz9hd" event={"ID":"5d6deebd-c8b3-46e1-9626-b635a9302784","Type":"ContainerStarted","Data":"f4eb46d2d7c8b33b623b2c8c2af8097f92add39a4080edf26a1fda6803782002"} Dec 05 08:24:06 crc kubenswrapper[4863]: I1205 08:24:06.725281 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bz9hd" event={"ID":"5d6deebd-c8b3-46e1-9626-b635a9302784","Type":"ContainerStarted","Data":"afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1"} Dec 05 08:24:07 crc kubenswrapper[4863]: I1205 08:24:07.743629 4863 generic.go:334] "Generic (PLEG): container finished" podID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerID="afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1" exitCode=0 Dec 05 08:24:07 crc kubenswrapper[4863]: I1205 08:24:07.743772 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bz9hd" event={"ID":"5d6deebd-c8b3-46e1-9626-b635a9302784","Type":"ContainerDied","Data":"afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1"} Dec 05 08:24:08 crc kubenswrapper[4863]: I1205 08:24:08.757090 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bz9hd" event={"ID":"5d6deebd-c8b3-46e1-9626-b635a9302784","Type":"ContainerStarted","Data":"31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5"} Dec 05 08:24:08 crc kubenswrapper[4863]: I1205 08:24:08.776750 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bz9hd" 
podStartSLOduration=3.224093797 podStartE2EDuration="5.776727755s" podCreationTimestamp="2025-12-05 08:24:03 +0000 UTC" firstStartedPulling="2025-12-05 08:24:05.715567629 +0000 UTC m=+5873.441564679" lastFinishedPulling="2025-12-05 08:24:08.268201587 +0000 UTC m=+5875.994198637" observedRunningTime="2025-12-05 08:24:08.772101213 +0000 UTC m=+5876.498098273" watchObservedRunningTime="2025-12-05 08:24:08.776727755 +0000 UTC m=+5876.502724795" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.170446 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fc7dfc8ff-hk4v4"] Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.176485 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.180978 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fc7dfc8ff-hk4v4"] Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.182147 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.312938 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-dns-svc\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.313014 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-openstack-cell1\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.313089 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-sb\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.313118 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-config\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.313141 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc8pw\" (UniqueName: \"kubernetes.io/projected/793c0030-7ab4-41a3-923b-192c80b1ed04-kube-api-access-xc8pw\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.313166 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-nb\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 
crc kubenswrapper[4863]: I1205 08:24:11.414817 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-dns-svc\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.414899 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-openstack-cell1\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.414985 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-sb\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.415017 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-config\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.415048 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc8pw\" (UniqueName: \"kubernetes.io/projected/793c0030-7ab4-41a3-923b-192c80b1ed04-kube-api-access-xc8pw\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.415080 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-nb\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.416568 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-dns-svc\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.416589 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-nb\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.416570 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-config\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.416855 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-sb\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.416887 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-openstack-cell1\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.434921 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc8pw\" (UniqueName: \"kubernetes.io/projected/793c0030-7ab4-41a3-923b-192c80b1ed04-kube-api-access-xc8pw\") pod \"dnsmasq-dns-fc7dfc8ff-hk4v4\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:11 crc kubenswrapper[4863]: I1205 08:24:11.493927 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:12 crc kubenswrapper[4863]: I1205 08:24:12.008320 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fc7dfc8ff-hk4v4"] Dec 05 08:24:12 crc kubenswrapper[4863]: W1205 08:24:12.024688 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod793c0030_7ab4_41a3_923b_192c80b1ed04.slice/crio-9ab4833f05f01802824f36bb58f83d723ba19cddd0387667c24bceba1627eb4f WatchSource:0}: Error finding container 9ab4833f05f01802824f36bb58f83d723ba19cddd0387667c24bceba1627eb4f: Status 404 returned error can't find the container with id 9ab4833f05f01802824f36bb58f83d723ba19cddd0387667c24bceba1627eb4f Dec 05 08:24:12 crc kubenswrapper[4863]: I1205 08:24:12.805129 4863 generic.go:334] "Generic (PLEG): container finished" podID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerID="220213f3e0620520e97d50d814f599d9a140e08fa7b39fbc866e9873fa47f300" exitCode=0 Dec 05 08:24:12 crc kubenswrapper[4863]: I1205 08:24:12.805257 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" event={"ID":"793c0030-7ab4-41a3-923b-192c80b1ed04","Type":"ContainerDied","Data":"220213f3e0620520e97d50d814f599d9a140e08fa7b39fbc866e9873fa47f300"} Dec 05 08:24:12 crc kubenswrapper[4863]: I1205 08:24:12.805665 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" event={"ID":"793c0030-7ab4-41a3-923b-192c80b1ed04","Type":"ContainerStarted","Data":"9ab4833f05f01802824f36bb58f83d723ba19cddd0387667c24bceba1627eb4f"} Dec 05 08:24:13 crc kubenswrapper[4863]: I1205 08:24:13.819454 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" event={"ID":"793c0030-7ab4-41a3-923b-192c80b1ed04","Type":"ContainerStarted","Data":"a8952d62c180e1f84e5a8b7b0ae81caff566e100b8febb2c3f98c36af55545a3"} Dec 05 08:24:13 crc kubenswrapper[4863]: I1205 08:24:13.819874 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:13 crc kubenswrapper[4863]: I1205 08:24:13.852843 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" podStartSLOduration=2.852821543 podStartE2EDuration="2.852821543s" podCreationTimestamp="2025-12-05 08:24:11 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:24:13.84237524 +0000 UTC m=+5881.568372360" watchObservedRunningTime="2025-12-05 08:24:13.852821543 +0000 UTC m=+5881.578818583" Dec 05 08:24:14 crc kubenswrapper[4863]: I1205 08:24:14.306693 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:14 crc kubenswrapper[4863]: I1205 08:24:14.306734 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:14 crc kubenswrapper[4863]: I1205 08:24:14.352042 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:14 crc kubenswrapper[4863]: I1205 08:24:14.602722 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:24:14 crc kubenswrapper[4863]: E1205 08:24:14.603315 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:24:14 crc kubenswrapper[4863]: I1205 08:24:14.876407 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:14 crc kubenswrapper[4863]: I1205 08:24:14.931105 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bz9hd"] Dec 05 08:24:15 crc kubenswrapper[4863]: I1205 08:24:15.033734 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-xhsv5"] Dec 05 08:24:15 crc kubenswrapper[4863]: I1205 08:24:15.049353 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-xhsv5"] Dec 05 08:24:16 crc kubenswrapper[4863]: I1205 08:24:16.616652 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59cd1339-d5a8-41ac-aed3-9148fd26816a" path="/var/lib/kubelet/pods/59cd1339-d5a8-41ac-aed3-9148fd26816a/volumes" Dec 05 08:24:16 crc kubenswrapper[4863]: I1205 08:24:16.842958 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bz9hd" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="registry-server" containerID="cri-o://31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5" gracePeriod=2 Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.366567 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.440895 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wg9p\" (UniqueName: \"kubernetes.io/projected/5d6deebd-c8b3-46e1-9626-b635a9302784-kube-api-access-2wg9p\") pod \"5d6deebd-c8b3-46e1-9626-b635a9302784\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.440965 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-utilities\") pod \"5d6deebd-c8b3-46e1-9626-b635a9302784\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.441147 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-catalog-content\") pod \"5d6deebd-c8b3-46e1-9626-b635a9302784\" (UID: \"5d6deebd-c8b3-46e1-9626-b635a9302784\") " Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.442258 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-utilities" (OuterVolumeSpecName: "utilities") pod "5d6deebd-c8b3-46e1-9626-b635a9302784" (UID: "5d6deebd-c8b3-46e1-9626-b635a9302784"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.442494 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.448100 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d6deebd-c8b3-46e1-9626-b635a9302784-kube-api-access-2wg9p" (OuterVolumeSpecName: "kube-api-access-2wg9p") pod "5d6deebd-c8b3-46e1-9626-b635a9302784" (UID: "5d6deebd-c8b3-46e1-9626-b635a9302784"). InnerVolumeSpecName "kube-api-access-2wg9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.503622 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d6deebd-c8b3-46e1-9626-b635a9302784" (UID: "5d6deebd-c8b3-46e1-9626-b635a9302784"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.544952 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6deebd-c8b3-46e1-9626-b635a9302784-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.544992 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wg9p\" (UniqueName: \"kubernetes.io/projected/5d6deebd-c8b3-46e1-9626-b635a9302784-kube-api-access-2wg9p\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.858561 4863 generic.go:334] "Generic (PLEG): container finished" podID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerID="31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5" exitCode=0 Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.858704 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bz9hd" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.858739 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bz9hd" event={"ID":"5d6deebd-c8b3-46e1-9626-b635a9302784","Type":"ContainerDied","Data":"31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5"} Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.859118 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bz9hd" event={"ID":"5d6deebd-c8b3-46e1-9626-b635a9302784","Type":"ContainerDied","Data":"f4eb46d2d7c8b33b623b2c8c2af8097f92add39a4080edf26a1fda6803782002"} Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.859146 4863 scope.go:117] "RemoveContainer" containerID="31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.923495 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bz9hd"] Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.927659 4863 scope.go:117] "RemoveContainer" containerID="afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1" Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.938247 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bz9hd"] Dec 05 08:24:17 crc kubenswrapper[4863]: I1205 08:24:17.957597 4863 scope.go:117] "RemoveContainer" containerID="3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68" Dec 05 08:24:18 crc kubenswrapper[4863]: I1205 08:24:18.018008 4863 scope.go:117] "RemoveContainer" containerID="31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5" Dec 05 08:24:18 crc kubenswrapper[4863]: E1205 08:24:18.018550 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5\": container with ID starting with 31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5 not found: ID does not exist" containerID="31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5" Dec 05 08:24:18 crc kubenswrapper[4863]: I1205 08:24:18.018580 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5"} err="failed to get container status 
\"31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5\": rpc error: code = NotFound desc = could not find container \"31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5\": container with ID starting with 31cdb407c94ffe2b4854848b6177f85bcc39d7550cca6c9c65ad20a3f117d9f5 not found: ID does not exist" Dec 05 08:24:18 crc kubenswrapper[4863]: I1205 08:24:18.018599 4863 scope.go:117] "RemoveContainer" containerID="afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1" Dec 05 08:24:18 crc kubenswrapper[4863]: E1205 08:24:18.018993 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1\": container with ID starting with afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1 not found: ID does not exist" containerID="afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1" Dec 05 08:24:18 crc kubenswrapper[4863]: I1205 08:24:18.019010 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1"} err="failed to get container status \"afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1\": rpc error: code = NotFound desc = could not find container \"afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1\": container with ID starting with afcd874130d715092672071edb7aef13187a14b98617fa80cacc70c7ff1d22f1 not found: ID does not exist" Dec 05 08:24:18 crc kubenswrapper[4863]: I1205 08:24:18.019023 4863 scope.go:117] "RemoveContainer" containerID="3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68" Dec 05 08:24:18 crc kubenswrapper[4863]: E1205 08:24:18.019411 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68\": container with ID starting with 3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68 not found: ID does not exist" containerID="3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68" Dec 05 08:24:18 crc kubenswrapper[4863]: I1205 08:24:18.019521 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68"} err="failed to get container status \"3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68\": rpc error: code = NotFound desc = could not find container \"3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68\": container with ID starting with 3c55a2fd016962fc045b2af4dbbd0ccafa2f36b680bb28ce3aabfdfaeb93ce68 not found: ID does not exist" Dec 05 08:24:18 crc kubenswrapper[4863]: I1205 08:24:18.618803 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" path="/var/lib/kubelet/pods/5d6deebd-c8b3-46e1-9626-b635a9302784/volumes" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.495635 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.591785 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-696f884c85-qct82"] Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.592058 4863 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-696f884c85-qct82" podUID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerName="dnsmasq-dns" containerID="cri-o://73a91fbd214d25ebe9f32b2591b9bc2fbd46150b4420bd69691756ea4503fda2" gracePeriod=10 Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.771233 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698794d99-lq2mw"] Dec 05 08:24:21 crc kubenswrapper[4863]: E1205 08:24:21.771982 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="extract-utilities" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.772003 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="extract-utilities" Dec 05 08:24:21 crc kubenswrapper[4863]: E1205 08:24:21.772018 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="registry-server" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.772025 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="registry-server" Dec 05 08:24:21 crc kubenswrapper[4863]: E1205 08:24:21.772041 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="extract-content" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.772048 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="extract-content" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.772231 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d6deebd-c8b3-46e1-9626-b635a9302784" containerName="registry-server" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.773333 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.801921 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698794d99-lq2mw"] Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.841988 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcskd\" (UniqueName: \"kubernetes.io/projected/e381f105-1a3a-4d04-a429-3e1627de35de-kube-api-access-pcskd\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.842061 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-config\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.842099 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-openstack-cell1\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.842131 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-ovsdbserver-sb\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.842176 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-dns-svc\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.842204 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-ovsdbserver-nb\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.895397 4863 generic.go:334] "Generic (PLEG): container finished" podID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerID="73a91fbd214d25ebe9f32b2591b9bc2fbd46150b4420bd69691756ea4503fda2" exitCode=0 Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.895443 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-696f884c85-qct82" event={"ID":"d8c5e45d-85b6-484b-800c-45e5abd6fcd3","Type":"ContainerDied","Data":"73a91fbd214d25ebe9f32b2591b9bc2fbd46150b4420bd69691756ea4503fda2"} Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.943553 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcskd\" (UniqueName: \"kubernetes.io/projected/e381f105-1a3a-4d04-a429-3e1627de35de-kube-api-access-pcskd\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: 
\"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.943634 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-config\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.943666 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-openstack-cell1\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.943693 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-ovsdbserver-sb\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.943738 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-dns-svc\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.943758 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-ovsdbserver-nb\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.944559 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-ovsdbserver-nb\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.944623 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-openstack-cell1\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.944861 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-config\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.945050 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-ovsdbserver-sb\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 
08:24:21.945376 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e381f105-1a3a-4d04-a429-3e1627de35de-dns-svc\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:21 crc kubenswrapper[4863]: I1205 08:24:21.965872 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcskd\" (UniqueName: \"kubernetes.io/projected/e381f105-1a3a-4d04-a429-3e1627de35de-kube-api-access-pcskd\") pod \"dnsmasq-dns-698794d99-lq2mw\" (UID: \"e381f105-1a3a-4d04-a429-3e1627de35de\") " pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.093792 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.094201 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.147379 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-sb\") pod \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.147448 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-config\") pod \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.147530 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-dns-svc\") pod \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.148789 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-nb\") pod \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.148886 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7h62b\" (UniqueName: \"kubernetes.io/projected/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-kube-api-access-7h62b\") pod \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\" (UID: \"d8c5e45d-85b6-484b-800c-45e5abd6fcd3\") " Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.153122 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-kube-api-access-7h62b" (OuterVolumeSpecName: "kube-api-access-7h62b") pod "d8c5e45d-85b6-484b-800c-45e5abd6fcd3" (UID: "d8c5e45d-85b6-484b-800c-45e5abd6fcd3"). InnerVolumeSpecName "kube-api-access-7h62b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.210008 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-config" (OuterVolumeSpecName: "config") pod "d8c5e45d-85b6-484b-800c-45e5abd6fcd3" (UID: "d8c5e45d-85b6-484b-800c-45e5abd6fcd3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.235892 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d8c5e45d-85b6-484b-800c-45e5abd6fcd3" (UID: "d8c5e45d-85b6-484b-800c-45e5abd6fcd3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.242057 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d8c5e45d-85b6-484b-800c-45e5abd6fcd3" (UID: "d8c5e45d-85b6-484b-800c-45e5abd6fcd3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.245096 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d8c5e45d-85b6-484b-800c-45e5abd6fcd3" (UID: "d8c5e45d-85b6-484b-800c-45e5abd6fcd3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.251172 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.251222 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7h62b\" (UniqueName: \"kubernetes.io/projected/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-kube-api-access-7h62b\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.251234 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.251245 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.251255 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8c5e45d-85b6-484b-800c-45e5abd6fcd3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.588289 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698794d99-lq2mw"] Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.908913 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-696f884c85-qct82" 
event={"ID":"d8c5e45d-85b6-484b-800c-45e5abd6fcd3","Type":"ContainerDied","Data":"584d1c21a733a76e0c3813471f2a5d754062bac20bcdba906592dee73a39c12e"} Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.908955 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-696f884c85-qct82" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.908963 4863 scope.go:117] "RemoveContainer" containerID="73a91fbd214d25ebe9f32b2591b9bc2fbd46150b4420bd69691756ea4503fda2" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.913878 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698794d99-lq2mw" event={"ID":"e381f105-1a3a-4d04-a429-3e1627de35de","Type":"ContainerStarted","Data":"07738346aab2e6e5d8044f0d86eae9e10d411b6b88b49f3a115b4d43d3d44582"} Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.938756 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-696f884c85-qct82"] Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.942287 4863 scope.go:117] "RemoveContainer" containerID="cd7d2b454cc0aa83daf3bcc42865ee4acb303f5a24bb363916c587f40acdc4e3" Dec 05 08:24:22 crc kubenswrapper[4863]: I1205 08:24:22.951755 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-696f884c85-qct82"] Dec 05 08:24:23 crc kubenswrapper[4863]: I1205 08:24:23.927013 4863 generic.go:334] "Generic (PLEG): container finished" podID="e381f105-1a3a-4d04-a429-3e1627de35de" containerID="da5c53e712079be8179c959f86bc31db06139b7bc1f2bea8735ed343788b0b01" exitCode=0 Dec 05 08:24:23 crc kubenswrapper[4863]: I1205 08:24:23.927132 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698794d99-lq2mw" event={"ID":"e381f105-1a3a-4d04-a429-3e1627de35de","Type":"ContainerDied","Data":"da5c53e712079be8179c959f86bc31db06139b7bc1f2bea8735ed343788b0b01"} Dec 05 08:24:24 crc kubenswrapper[4863]: I1205 08:24:24.615992 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" path="/var/lib/kubelet/pods/d8c5e45d-85b6-484b-800c-45e5abd6fcd3/volumes" Dec 05 08:24:24 crc kubenswrapper[4863]: I1205 08:24:24.939031 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698794d99-lq2mw" event={"ID":"e381f105-1a3a-4d04-a429-3e1627de35de","Type":"ContainerStarted","Data":"66445fbdf6e3a53ce1b351c144cd4496d640dd7506d88e4fb96218b1ee9be25a"} Dec 05 08:24:24 crc kubenswrapper[4863]: I1205 08:24:24.939153 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:24 crc kubenswrapper[4863]: I1205 08:24:24.960786 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698794d99-lq2mw" podStartSLOduration=3.960766279 podStartE2EDuration="3.960766279s" podCreationTimestamp="2025-12-05 08:24:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:24:24.958905144 +0000 UTC m=+5892.684902194" watchObservedRunningTime="2025-12-05 08:24:24.960766279 +0000 UTC m=+5892.686763319" Dec 05 08:24:25 crc kubenswrapper[4863]: I1205 08:24:25.602725 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:24:25 crc kubenswrapper[4863]: E1205 08:24:25.603139 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.376147 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp"] Dec 05 08:24:28 crc kubenswrapper[4863]: E1205 08:24:28.376956 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerName="dnsmasq-dns" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.376973 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerName="dnsmasq-dns" Dec 05 08:24:28 crc kubenswrapper[4863]: E1205 08:24:28.377017 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerName="init" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.377024 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerName="init" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.377252 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8c5e45d-85b6-484b-800c-45e5abd6fcd3" containerName="dnsmasq-dns" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.378433 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.383341 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.385633 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.385643 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.386033 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.412888 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp"] Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.508576 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.508625 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc 
kubenswrapper[4863]: I1205 08:24:28.508728 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.508788 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl9cq\" (UniqueName: \"kubernetes.io/projected/178bee9c-f288-4ca1-ac2a-d8295a776458-kube-api-access-kl9cq\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.508814 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.609859 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.609932 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl9cq\" (UniqueName: \"kubernetes.io/projected/178bee9c-f288-4ca1-ac2a-d8295a776458-kube-api-access-kl9cq\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.609959 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.610030 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.610051 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.616911 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.619103 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.619196 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.619731 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.626514 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl9cq\" (UniqueName: \"kubernetes.io/projected/178bee9c-f288-4ca1-ac2a-d8295a776458-kube-api-access-kl9cq\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c549tp\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:28 crc kubenswrapper[4863]: I1205 08:24:28.716573 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:29 crc kubenswrapper[4863]: I1205 08:24:29.286411 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp"] Dec 05 08:24:29 crc kubenswrapper[4863]: W1205 08:24:29.293377 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod178bee9c_f288_4ca1_ac2a_d8295a776458.slice/crio-3d72add25d1a35c5f044662cb8420cc2c658de0fa5f461c10110064b60d9bdba WatchSource:0}: Error finding container 3d72add25d1a35c5f044662cb8420cc2c658de0fa5f461c10110064b60d9bdba: Status 404 returned error can't find the container with id 3d72add25d1a35c5f044662cb8420cc2c658de0fa5f461c10110064b60d9bdba Dec 05 08:24:29 crc kubenswrapper[4863]: I1205 08:24:29.999065 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" event={"ID":"178bee9c-f288-4ca1-ac2a-d8295a776458","Type":"ContainerStarted","Data":"3d72add25d1a35c5f044662cb8420cc2c658de0fa5f461c10110064b60d9bdba"} Dec 05 08:24:32 crc kubenswrapper[4863]: I1205 08:24:32.096351 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698794d99-lq2mw" Dec 05 08:24:32 crc kubenswrapper[4863]: I1205 08:24:32.163859 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fc7dfc8ff-hk4v4"] Dec 05 08:24:32 crc kubenswrapper[4863]: I1205 08:24:32.164125 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerName="dnsmasq-dns" containerID="cri-o://a8952d62c180e1f84e5a8b7b0ae81caff566e100b8febb2c3f98c36af55545a3" gracePeriod=10 Dec 05 08:24:33 crc kubenswrapper[4863]: I1205 08:24:33.029977 4863 generic.go:334] "Generic (PLEG): container finished" podID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerID="a8952d62c180e1f84e5a8b7b0ae81caff566e100b8febb2c3f98c36af55545a3" exitCode=0 Dec 05 08:24:33 crc kubenswrapper[4863]: I1205 08:24:33.030595 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" event={"ID":"793c0030-7ab4-41a3-923b-192c80b1ed04","Type":"ContainerDied","Data":"a8952d62c180e1f84e5a8b7b0ae81caff566e100b8febb2c3f98c36af55545a3"} Dec 05 08:24:36 crc kubenswrapper[4863]: I1205 08:24:36.495685 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.125:5353: connect: connection refused" Dec 05 08:24:36 crc kubenswrapper[4863]: I1205 08:24:36.605384 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:24:36 crc kubenswrapper[4863]: E1205 08:24:36.605695 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.265877 4863 scope.go:117] 
"RemoveContainer" containerID="dc3696c23e735e4d10e0f7dfc5ec4c84c16b7dad66024a5a1d121806458d9b56" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.316356 4863 scope.go:117] "RemoveContainer" containerID="d0f2744964eb39cbced247c4e45d638c851588a1f702a8031e46f85956d220a5" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.358293 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.369658 4863 scope.go:117] "RemoveContainer" containerID="40e6ecd9c6c336d33768c2a0f3fcba5fc37583a4f23872519d452200756fbf64" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.427242 4863 scope.go:117] "RemoveContainer" containerID="66e1931c38ce932f4eb9f0ce039af457f10b365d12498bb36ec54baf2a1d54e9" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.520582 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-config\") pod \"793c0030-7ab4-41a3-923b-192c80b1ed04\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.520731 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-sb\") pod \"793c0030-7ab4-41a3-923b-192c80b1ed04\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.520905 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-nb\") pod \"793c0030-7ab4-41a3-923b-192c80b1ed04\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.520986 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xc8pw\" (UniqueName: \"kubernetes.io/projected/793c0030-7ab4-41a3-923b-192c80b1ed04-kube-api-access-xc8pw\") pod \"793c0030-7ab4-41a3-923b-192c80b1ed04\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.521072 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-openstack-cell1\") pod \"793c0030-7ab4-41a3-923b-192c80b1ed04\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.521128 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-dns-svc\") pod \"793c0030-7ab4-41a3-923b-192c80b1ed04\" (UID: \"793c0030-7ab4-41a3-923b-192c80b1ed04\") " Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.525590 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/793c0030-7ab4-41a3-923b-192c80b1ed04-kube-api-access-xc8pw" (OuterVolumeSpecName: "kube-api-access-xc8pw") pod "793c0030-7ab4-41a3-923b-192c80b1ed04" (UID: "793c0030-7ab4-41a3-923b-192c80b1ed04"). InnerVolumeSpecName "kube-api-access-xc8pw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.569000 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "793c0030-7ab4-41a3-923b-192c80b1ed04" (UID: "793c0030-7ab4-41a3-923b-192c80b1ed04"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.570373 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "793c0030-7ab4-41a3-923b-192c80b1ed04" (UID: "793c0030-7ab4-41a3-923b-192c80b1ed04"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.590640 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "793c0030-7ab4-41a3-923b-192c80b1ed04" (UID: "793c0030-7ab4-41a3-923b-192c80b1ed04"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.590708 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "793c0030-7ab4-41a3-923b-192c80b1ed04" (UID: "793c0030-7ab4-41a3-923b-192c80b1ed04"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.592243 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-config" (OuterVolumeSpecName: "config") pod "793c0030-7ab4-41a3-923b-192c80b1ed04" (UID: "793c0030-7ab4-41a3-923b-192c80b1ed04"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.623810 4863 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.623832 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.623843 4863 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.623852 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xc8pw\" (UniqueName: \"kubernetes.io/projected/793c0030-7ab4-41a3-923b-192c80b1ed04-kube-api-access-xc8pw\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.623860 4863 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-openstack-cell1\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:38 crc kubenswrapper[4863]: I1205 08:24:38.623868 4863 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/793c0030-7ab4-41a3-923b-192c80b1ed04-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.140506 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" event={"ID":"793c0030-7ab4-41a3-923b-192c80b1ed04","Type":"ContainerDied","Data":"9ab4833f05f01802824f36bb58f83d723ba19cddd0387667c24bceba1627eb4f"} Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.140764 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fc7dfc8ff-hk4v4" Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.140858 4863 scope.go:117] "RemoveContainer" containerID="a8952d62c180e1f84e5a8b7b0ae81caff566e100b8febb2c3f98c36af55545a3" Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.143230 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" event={"ID":"178bee9c-f288-4ca1-ac2a-d8295a776458","Type":"ContainerStarted","Data":"3442edd070f6e84c4c25fc544727bd0b2462af7973aa451c938f68129674c694"} Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.170374 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" podStartSLOduration=2.337173593 podStartE2EDuration="11.170359254s" podCreationTimestamp="2025-12-05 08:24:28 +0000 UTC" firstStartedPulling="2025-12-05 08:24:29.296371009 +0000 UTC m=+5897.022368059" lastFinishedPulling="2025-12-05 08:24:38.12955666 +0000 UTC m=+5905.855553720" observedRunningTime="2025-12-05 08:24:39.168801277 +0000 UTC m=+5906.894798357" watchObservedRunningTime="2025-12-05 08:24:39.170359254 +0000 UTC m=+5906.896356294" Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.174803 4863 scope.go:117] "RemoveContainer" containerID="220213f3e0620520e97d50d814f599d9a140e08fa7b39fbc866e9873fa47f300" Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.195177 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fc7dfc8ff-hk4v4"] Dec 05 08:24:39 crc kubenswrapper[4863]: I1205 08:24:39.206944 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fc7dfc8ff-hk4v4"] Dec 05 08:24:40 crc kubenswrapper[4863]: I1205 08:24:40.627862 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" path="/var/lib/kubelet/pods/793c0030-7ab4-41a3-923b-192c80b1ed04/volumes" Dec 05 08:24:50 crc kubenswrapper[4863]: I1205 08:24:50.602882 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:24:50 crc kubenswrapper[4863]: E1205 08:24:50.604401 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:24:52 crc kubenswrapper[4863]: I1205 08:24:52.310623 4863 generic.go:334] "Generic (PLEG): container finished" podID="178bee9c-f288-4ca1-ac2a-d8295a776458" containerID="3442edd070f6e84c4c25fc544727bd0b2462af7973aa451c938f68129674c694" exitCode=0 Dec 05 08:24:52 crc kubenswrapper[4863]: I1205 08:24:52.310794 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" event={"ID":"178bee9c-f288-4ca1-ac2a-d8295a776458","Type":"ContainerDied","Data":"3442edd070f6e84c4c25fc544727bd0b2462af7973aa451c938f68129674c694"} Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.812025 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.971962 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ssh-key\") pod \"178bee9c-f288-4ca1-ac2a-d8295a776458\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.972055 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ceph\") pod \"178bee9c-f288-4ca1-ac2a-d8295a776458\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.972158 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-inventory\") pod \"178bee9c-f288-4ca1-ac2a-d8295a776458\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.972336 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-pre-adoption-validation-combined-ca-bundle\") pod \"178bee9c-f288-4ca1-ac2a-d8295a776458\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.972384 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kl9cq\" (UniqueName: \"kubernetes.io/projected/178bee9c-f288-4ca1-ac2a-d8295a776458-kube-api-access-kl9cq\") pod \"178bee9c-f288-4ca1-ac2a-d8295a776458\" (UID: \"178bee9c-f288-4ca1-ac2a-d8295a776458\") " Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.977812 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ceph" (OuterVolumeSpecName: "ceph") pod "178bee9c-f288-4ca1-ac2a-d8295a776458" (UID: "178bee9c-f288-4ca1-ac2a-d8295a776458"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.978173 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/178bee9c-f288-4ca1-ac2a-d8295a776458-kube-api-access-kl9cq" (OuterVolumeSpecName: "kube-api-access-kl9cq") pod "178bee9c-f288-4ca1-ac2a-d8295a776458" (UID: "178bee9c-f288-4ca1-ac2a-d8295a776458"). InnerVolumeSpecName "kube-api-access-kl9cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.979994 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "178bee9c-f288-4ca1-ac2a-d8295a776458" (UID: "178bee9c-f288-4ca1-ac2a-d8295a776458"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:24:53 crc kubenswrapper[4863]: I1205 08:24:53.999059 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-inventory" (OuterVolumeSpecName: "inventory") pod "178bee9c-f288-4ca1-ac2a-d8295a776458" (UID: "178bee9c-f288-4ca1-ac2a-d8295a776458"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.002591 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "178bee9c-f288-4ca1-ac2a-d8295a776458" (UID: "178bee9c-f288-4ca1-ac2a-d8295a776458"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.075232 4863 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.075817 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kl9cq\" (UniqueName: \"kubernetes.io/projected/178bee9c-f288-4ca1-ac2a-d8295a776458-kube-api-access-kl9cq\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.075945 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.076095 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.076196 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/178bee9c-f288-4ca1-ac2a-d8295a776458-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.332277 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" event={"ID":"178bee9c-f288-4ca1-ac2a-d8295a776458","Type":"ContainerDied","Data":"3d72add25d1a35c5f044662cb8420cc2c658de0fa5f461c10110064b60d9bdba"} Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.332325 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d72add25d1a35c5f044662cb8420cc2c658de0fa5f461c10110064b60d9bdba" Dec 05 08:24:54 crc kubenswrapper[4863]: I1205 08:24:54.332337 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c549tp" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.646229 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz"] Dec 05 08:24:55 crc kubenswrapper[4863]: E1205 08:24:55.648568 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerName="init" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.648591 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerName="init" Dec 05 08:24:55 crc kubenswrapper[4863]: E1205 08:24:55.648616 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178bee9c-f288-4ca1-ac2a-d8295a776458" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.648624 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="178bee9c-f288-4ca1-ac2a-d8295a776458" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Dec 05 08:24:55 crc kubenswrapper[4863]: E1205 08:24:55.648675 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerName="dnsmasq-dns" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.648682 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerName="dnsmasq-dns" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.652046 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="178bee9c-f288-4ca1-ac2a-d8295a776458" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.652125 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="793c0030-7ab4-41a3-923b-192c80b1ed04" containerName="dnsmasq-dns" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.653670 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.666555 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.667461 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.671128 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.671251 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.709223 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.709392 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.709695 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzcvd\" (UniqueName: \"kubernetes.io/projected/117a0ebe-6023-41c5-b416-f583e46b4ce4-kube-api-access-kzcvd\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.709727 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.709771 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.711598 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz"] Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.824611 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.824716 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.824872 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzcvd\" (UniqueName: \"kubernetes.io/projected/117a0ebe-6023-41c5-b416-f583e46b4ce4-kube-api-access-kzcvd\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.824903 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.824943 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.831552 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.831797 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.840760 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.842118 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzcvd\" (UniqueName: \"kubernetes.io/projected/117a0ebe-6023-41c5-b416-f583e46b4ce4-kube-api-access-kzcvd\") 
pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.845665 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:55 crc kubenswrapper[4863]: I1205 08:24:55.999212 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:24:56 crc kubenswrapper[4863]: I1205 08:24:56.584699 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz"] Dec 05 08:24:57 crc kubenswrapper[4863]: I1205 08:24:57.066318 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-78dd7"] Dec 05 08:24:57 crc kubenswrapper[4863]: I1205 08:24:57.095796 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-e11c-account-create-update-q5ps9"] Dec 05 08:24:57 crc kubenswrapper[4863]: I1205 08:24:57.113025 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-78dd7"] Dec 05 08:24:57 crc kubenswrapper[4863]: I1205 08:24:57.126249 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-e11c-account-create-update-q5ps9"] Dec 05 08:24:57 crc kubenswrapper[4863]: I1205 08:24:57.361402 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" event={"ID":"117a0ebe-6023-41c5-b416-f583e46b4ce4","Type":"ContainerStarted","Data":"e1a7adacc2d0894b8caecc503482cd4378cbd2a8264bf3b0302e6d1b21a614a2"} Dec 05 08:24:57 crc kubenswrapper[4863]: I1205 08:24:57.361832 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" event={"ID":"117a0ebe-6023-41c5-b416-f583e46b4ce4","Type":"ContainerStarted","Data":"176bc8ad22619e2c72feab88e44727e748e32c9da464ef0118183049af7a1109"} Dec 05 08:24:57 crc kubenswrapper[4863]: I1205 08:24:57.384263 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" podStartSLOduration=1.96301479 podStartE2EDuration="2.384237501s" podCreationTimestamp="2025-12-05 08:24:55 +0000 UTC" firstStartedPulling="2025-12-05 08:24:56.597307516 +0000 UTC m=+5924.323304556" lastFinishedPulling="2025-12-05 08:24:57.018530217 +0000 UTC m=+5924.744527267" observedRunningTime="2025-12-05 08:24:57.377131668 +0000 UTC m=+5925.103128708" watchObservedRunningTime="2025-12-05 08:24:57.384237501 +0000 UTC m=+5925.110234541" Dec 05 08:24:58 crc kubenswrapper[4863]: I1205 08:24:58.619532 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f1f484b-c1ae-47dd-9a9d-cea73101d2cf" path="/var/lib/kubelet/pods/3f1f484b-c1ae-47dd-9a9d-cea73101d2cf/volumes" Dec 05 08:24:58 crc kubenswrapper[4863]: I1205 08:24:58.620673 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a035e6d7-f5f5-45ee-9636-dc4c3c33456b" path="/var/lib/kubelet/pods/a035e6d7-f5f5-45ee-9636-dc4c3c33456b/volumes" Dec 05 08:25:03 crc kubenswrapper[4863]: I1205 08:25:03.601875 
4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:25:03 crc kubenswrapper[4863]: E1205 08:25:03.602642 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:25:14 crc kubenswrapper[4863]: I1205 08:25:14.602147 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:25:14 crc kubenswrapper[4863]: E1205 08:25:14.604536 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:25:24 crc kubenswrapper[4863]: I1205 08:25:24.056558 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-rlnq8"] Dec 05 08:25:24 crc kubenswrapper[4863]: I1205 08:25:24.072160 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-rlnq8"] Dec 05 08:25:24 crc kubenswrapper[4863]: I1205 08:25:24.623808 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9ff5445-d86b-48bb-b323-9911e1ba1a66" path="/var/lib/kubelet/pods/d9ff5445-d86b-48bb-b323-9911e1ba1a66/volumes" Dec 05 08:25:29 crc kubenswrapper[4863]: I1205 08:25:29.602751 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:25:29 crc kubenswrapper[4863]: E1205 08:25:29.603736 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:25:38 crc kubenswrapper[4863]: I1205 08:25:38.600733 4863 scope.go:117] "RemoveContainer" containerID="9432b0dcfd79c99d718106e63060290b5abc8375497d9fea1932f1369c81bbbb" Dec 05 08:25:38 crc kubenswrapper[4863]: I1205 08:25:38.742779 4863 scope.go:117] "RemoveContainer" containerID="d9b29641eaee6962c47db3fa5a5bb50cea33b34e2a26b82288ebe7c355e96181" Dec 05 08:25:38 crc kubenswrapper[4863]: I1205 08:25:38.772406 4863 scope.go:117] "RemoveContainer" containerID="78e373ddd04cfa49e667ff50d89979b804d9946e14846e58dea72b87a0f2a7cb" Dec 05 08:25:44 crc kubenswrapper[4863]: I1205 08:25:44.601934 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:25:44 crc kubenswrapper[4863]: E1205 08:25:44.602689 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:25:57 crc kubenswrapper[4863]: I1205 08:25:57.602355 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:25:57 crc kubenswrapper[4863]: E1205 08:25:57.605413 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:26:10 crc kubenswrapper[4863]: I1205 08:26:10.602177 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:26:10 crc kubenswrapper[4863]: E1205 08:26:10.603087 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:26:23 crc kubenswrapper[4863]: I1205 08:26:23.602669 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:26:23 crc kubenswrapper[4863]: E1205 08:26:23.603296 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:26:37 crc kubenswrapper[4863]: I1205 08:26:37.603105 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:26:37 crc kubenswrapper[4863]: E1205 08:26:37.604034 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:26:50 crc kubenswrapper[4863]: I1205 08:26:50.602637 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:26:51 crc kubenswrapper[4863]: I1205 08:26:51.660200 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"b8e6e476a58c734faea076692d29c4d6d01bd30323dc96489c43608c29369282"} Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.546848 4863 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-cpc9w"] Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.550968 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.579146 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpc9w"] Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.661153 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-catalog-content\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.661685 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv5d5\" (UniqueName: \"kubernetes.io/projected/bfc80196-d068-4c87-a8cf-7636caf118fc-kube-api-access-wv5d5\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.661879 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-utilities\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.763604 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv5d5\" (UniqueName: \"kubernetes.io/projected/bfc80196-d068-4c87-a8cf-7636caf118fc-kube-api-access-wv5d5\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.763680 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-utilities\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.763740 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-catalog-content\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.764224 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-catalog-content\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.764362 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-utilities\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") 
" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.785701 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv5d5\" (UniqueName: \"kubernetes.io/projected/bfc80196-d068-4c87-a8cf-7636caf118fc-kube-api-access-wv5d5\") pod \"redhat-marketplace-cpc9w\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:51 crc kubenswrapper[4863]: I1205 08:28:51.878272 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:28:52 crc kubenswrapper[4863]: I1205 08:28:52.358918 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpc9w"] Dec 05 08:28:53 crc kubenswrapper[4863]: I1205 08:28:53.034962 4863 generic.go:334] "Generic (PLEG): container finished" podID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerID="8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165" exitCode=0 Dec 05 08:28:53 crc kubenswrapper[4863]: I1205 08:28:53.035021 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpc9w" event={"ID":"bfc80196-d068-4c87-a8cf-7636caf118fc","Type":"ContainerDied","Data":"8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165"} Dec 05 08:28:53 crc kubenswrapper[4863]: I1205 08:28:53.035234 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpc9w" event={"ID":"bfc80196-d068-4c87-a8cf-7636caf118fc","Type":"ContainerStarted","Data":"e4d6fcdb366c468be6b1d3532d434187045a49dd6624530dad9bdc51134aa941"} Dec 05 08:28:53 crc kubenswrapper[4863]: I1205 08:28:53.038656 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:28:54 crc kubenswrapper[4863]: I1205 08:28:54.046513 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpc9w" event={"ID":"bfc80196-d068-4c87-a8cf-7636caf118fc","Type":"ContainerStarted","Data":"c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427"} Dec 05 08:28:55 crc kubenswrapper[4863]: I1205 08:28:55.076037 4863 generic.go:334] "Generic (PLEG): container finished" podID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerID="c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427" exitCode=0 Dec 05 08:28:55 crc kubenswrapper[4863]: I1205 08:28:55.076382 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpc9w" event={"ID":"bfc80196-d068-4c87-a8cf-7636caf118fc","Type":"ContainerDied","Data":"c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427"} Dec 05 08:28:56 crc kubenswrapper[4863]: I1205 08:28:56.087701 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpc9w" event={"ID":"bfc80196-d068-4c87-a8cf-7636caf118fc","Type":"ContainerStarted","Data":"eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9"} Dec 05 08:28:56 crc kubenswrapper[4863]: I1205 08:28:56.118287 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cpc9w" podStartSLOduration=2.537108604 podStartE2EDuration="5.118258068s" podCreationTimestamp="2025-12-05 08:28:51 +0000 UTC" firstStartedPulling="2025-12-05 08:28:53.038409192 +0000 UTC m=+6160.764406232" lastFinishedPulling="2025-12-05 08:28:55.619558616 +0000 UTC 
m=+6163.345555696" observedRunningTime="2025-12-05 08:28:56.107553549 +0000 UTC m=+6163.833550629" watchObservedRunningTime="2025-12-05 08:28:56.118258068 +0000 UTC m=+6163.844255128" Dec 05 08:29:01 crc kubenswrapper[4863]: I1205 08:29:01.878600 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:29:01 crc kubenswrapper[4863]: I1205 08:29:01.879166 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:29:01 crc kubenswrapper[4863]: I1205 08:29:01.950319 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:29:02 crc kubenswrapper[4863]: I1205 08:29:02.207380 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:29:02 crc kubenswrapper[4863]: I1205 08:29:02.266101 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpc9w"] Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.176973 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cpc9w" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="registry-server" containerID="cri-o://eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9" gracePeriod=2 Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.644294 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.737623 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-catalog-content\") pod \"bfc80196-d068-4c87-a8cf-7636caf118fc\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.737733 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv5d5\" (UniqueName: \"kubernetes.io/projected/bfc80196-d068-4c87-a8cf-7636caf118fc-kube-api-access-wv5d5\") pod \"bfc80196-d068-4c87-a8cf-7636caf118fc\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.737877 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-utilities\") pod \"bfc80196-d068-4c87-a8cf-7636caf118fc\" (UID: \"bfc80196-d068-4c87-a8cf-7636caf118fc\") " Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.738774 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-utilities" (OuterVolumeSpecName: "utilities") pod "bfc80196-d068-4c87-a8cf-7636caf118fc" (UID: "bfc80196-d068-4c87-a8cf-7636caf118fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.743593 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfc80196-d068-4c87-a8cf-7636caf118fc-kube-api-access-wv5d5" (OuterVolumeSpecName: "kube-api-access-wv5d5") pod "bfc80196-d068-4c87-a8cf-7636caf118fc" (UID: "bfc80196-d068-4c87-a8cf-7636caf118fc"). 
InnerVolumeSpecName "kube-api-access-wv5d5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.756499 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bfc80196-d068-4c87-a8cf-7636caf118fc" (UID: "bfc80196-d068-4c87-a8cf-7636caf118fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.840778 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.840813 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv5d5\" (UniqueName: \"kubernetes.io/projected/bfc80196-d068-4c87-a8cf-7636caf118fc-kube-api-access-wv5d5\") on node \"crc\" DevicePath \"\"" Dec 05 08:29:04 crc kubenswrapper[4863]: I1205 08:29:04.840824 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfc80196-d068-4c87-a8cf-7636caf118fc-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.192058 4863 generic.go:334] "Generic (PLEG): container finished" podID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerID="eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9" exitCode=0 Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.192124 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpc9w" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.192122 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpc9w" event={"ID":"bfc80196-d068-4c87-a8cf-7636caf118fc","Type":"ContainerDied","Data":"eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9"} Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.192579 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpc9w" event={"ID":"bfc80196-d068-4c87-a8cf-7636caf118fc","Type":"ContainerDied","Data":"e4d6fcdb366c468be6b1d3532d434187045a49dd6624530dad9bdc51134aa941"} Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.192602 4863 scope.go:117] "RemoveContainer" containerID="eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.241670 4863 scope.go:117] "RemoveContainer" containerID="c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.245229 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpc9w"] Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.257855 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpc9w"] Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.275362 4863 scope.go:117] "RemoveContainer" containerID="8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.321356 4863 scope.go:117] "RemoveContainer" containerID="eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9" Dec 05 08:29:05 crc kubenswrapper[4863]: 
E1205 08:29:05.321799 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9\": container with ID starting with eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9 not found: ID does not exist" containerID="eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.321830 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9"} err="failed to get container status \"eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9\": rpc error: code = NotFound desc = could not find container \"eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9\": container with ID starting with eaec1f888e81be72724654bf1022e0a71cb16c74759be6097c4fbc6cdf9d97f9 not found: ID does not exist" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.321855 4863 scope.go:117] "RemoveContainer" containerID="c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427" Dec 05 08:29:05 crc kubenswrapper[4863]: E1205 08:29:05.322176 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427\": container with ID starting with c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427 not found: ID does not exist" containerID="c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.322224 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427"} err="failed to get container status \"c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427\": rpc error: code = NotFound desc = could not find container \"c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427\": container with ID starting with c8f2c8c91bac8c796868bb2565d93e235ba0eff12bf430e9eca91106cdd4d427 not found: ID does not exist" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.322252 4863 scope.go:117] "RemoveContainer" containerID="8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165" Dec 05 08:29:05 crc kubenswrapper[4863]: E1205 08:29:05.322606 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165\": container with ID starting with 8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165 not found: ID does not exist" containerID="8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165" Dec 05 08:29:05 crc kubenswrapper[4863]: I1205 08:29:05.322628 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165"} err="failed to get container status \"8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165\": rpc error: code = NotFound desc = could not find container \"8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165\": container with ID starting with 8cc58c9d7a6edde66c8a614c9efa4ebb1f8ee5dc25887552943fc06b2567a165 not found: ID does not exist" Dec 05 08:29:06 crc kubenswrapper[4863]: I1205 08:29:06.619134 
4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" path="/var/lib/kubelet/pods/bfc80196-d068-4c87-a8cf-7636caf118fc/volumes" Dec 05 08:29:08 crc kubenswrapper[4863]: I1205 08:29:08.464784 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:29:08 crc kubenswrapper[4863]: I1205 08:29:08.465214 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:29:38 crc kubenswrapper[4863]: I1205 08:29:38.464404 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:29:38 crc kubenswrapper[4863]: I1205 08:29:38.465005 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.055940 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-lwzdb"] Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.066727 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-0a24-account-create-update-8kkx2"] Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.078462 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-lwzdb"] Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.090665 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-0a24-account-create-update-8kkx2"] Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.224192 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-95lq7"] Dec 05 08:29:48 crc kubenswrapper[4863]: E1205 08:29:48.224733 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="extract-content" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.224764 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="extract-content" Dec 05 08:29:48 crc kubenswrapper[4863]: E1205 08:29:48.224787 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="registry-server" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.224796 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="registry-server" Dec 05 08:29:48 crc kubenswrapper[4863]: E1205 08:29:48.224813 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="extract-utilities" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 
08:29:48.224820 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="extract-utilities" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.225071 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfc80196-d068-4c87-a8cf-7636caf118fc" containerName="registry-server" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.226956 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.240304 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-95lq7"] Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.364381 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s48x8\" (UniqueName: \"kubernetes.io/projected/9a2a6081-4133-449c-b308-769f52196a94-kube-api-access-s48x8\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.364709 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-utilities\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.364892 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-catalog-content\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.466871 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s48x8\" (UniqueName: \"kubernetes.io/projected/9a2a6081-4133-449c-b308-769f52196a94-kube-api-access-s48x8\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.467060 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-utilities\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.467099 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-catalog-content\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.467547 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-utilities\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.467643 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-catalog-content\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.488493 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s48x8\" (UniqueName: \"kubernetes.io/projected/9a2a6081-4133-449c-b308-769f52196a94-kube-api-access-s48x8\") pod \"redhat-operators-95lq7\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.551114 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.617990 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="487987c6-44ef-446d-805f-2a6e57dcc81b" path="/var/lib/kubelet/pods/487987c6-44ef-446d-805f-2a6e57dcc81b/volumes" Dec 05 08:29:48 crc kubenswrapper[4863]: I1205 08:29:48.619191 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e259db5d-318c-484f-b907-3ff3f053a96d" path="/var/lib/kubelet/pods/e259db5d-318c-484f-b907-3ff3f053a96d/volumes" Dec 05 08:29:49 crc kubenswrapper[4863]: I1205 08:29:49.040319 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-95lq7"] Dec 05 08:29:49 crc kubenswrapper[4863]: I1205 08:29:49.606259 4863 generic.go:334] "Generic (PLEG): container finished" podID="9a2a6081-4133-449c-b308-769f52196a94" containerID="032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50" exitCode=0 Dec 05 08:29:49 crc kubenswrapper[4863]: I1205 08:29:49.606464 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95lq7" event={"ID":"9a2a6081-4133-449c-b308-769f52196a94","Type":"ContainerDied","Data":"032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50"} Dec 05 08:29:49 crc kubenswrapper[4863]: I1205 08:29:49.607261 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95lq7" event={"ID":"9a2a6081-4133-449c-b308-769f52196a94","Type":"ContainerStarted","Data":"44078d0874a2451c9fd929927594f5046da1931851cc64b9074e3c0f5d1be051"} Dec 05 08:29:50 crc kubenswrapper[4863]: I1205 08:29:50.617633 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95lq7" event={"ID":"9a2a6081-4133-449c-b308-769f52196a94","Type":"ContainerStarted","Data":"b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe"} Dec 05 08:29:54 crc kubenswrapper[4863]: I1205 08:29:54.656137 4863 generic.go:334] "Generic (PLEG): container finished" podID="9a2a6081-4133-449c-b308-769f52196a94" containerID="b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe" exitCode=0 Dec 05 08:29:54 crc kubenswrapper[4863]: I1205 08:29:54.656188 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95lq7" event={"ID":"9a2a6081-4133-449c-b308-769f52196a94","Type":"ContainerDied","Data":"b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe"} Dec 05 08:29:55 crc kubenswrapper[4863]: I1205 08:29:55.678673 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95lq7" 
event={"ID":"9a2a6081-4133-449c-b308-769f52196a94","Type":"ContainerStarted","Data":"79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1"} Dec 05 08:29:55 crc kubenswrapper[4863]: I1205 08:29:55.708637 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-95lq7" podStartSLOduration=2.270729514 podStartE2EDuration="7.708606111s" podCreationTimestamp="2025-12-05 08:29:48 +0000 UTC" firstStartedPulling="2025-12-05 08:29:49.613838317 +0000 UTC m=+6217.339835357" lastFinishedPulling="2025-12-05 08:29:55.051714914 +0000 UTC m=+6222.777711954" observedRunningTime="2025-12-05 08:29:55.697755678 +0000 UTC m=+6223.423752728" watchObservedRunningTime="2025-12-05 08:29:55.708606111 +0000 UTC m=+6223.434603161" Dec 05 08:29:58 crc kubenswrapper[4863]: I1205 08:29:58.551590 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:58 crc kubenswrapper[4863]: I1205 08:29:58.552169 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:29:59 crc kubenswrapper[4863]: I1205 08:29:59.610574 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-95lq7" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="registry-server" probeResult="failure" output=< Dec 05 08:29:59 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 08:29:59 crc kubenswrapper[4863]: > Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.194818 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz"] Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.196214 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.198769 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.199698 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.205163 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz"] Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.228332 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-config-volume\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.228393 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lph7g\" (UniqueName: \"kubernetes.io/projected/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-kube-api-access-lph7g\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.228429 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-secret-volume\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.330392 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-config-volume\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.330455 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lph7g\" (UniqueName: \"kubernetes.io/projected/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-kube-api-access-lph7g\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.330527 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-secret-volume\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.331621 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-config-volume\") pod 
\"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.337689 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-secret-volume\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.349996 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lph7g\" (UniqueName: \"kubernetes.io/projected/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-kube-api-access-lph7g\") pod \"collect-profiles-29415390-wq8cz\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.521274 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:00 crc kubenswrapper[4863]: I1205 08:30:00.999342 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz"] Dec 05 08:30:01 crc kubenswrapper[4863]: I1205 08:30:01.740566 4863 generic.go:334] "Generic (PLEG): container finished" podID="c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" containerID="c5f98559c6664b6fa1d9e785ae9cad40fa4c73adcc4438dbcf111dd55ca8e416" exitCode=0 Dec 05 08:30:01 crc kubenswrapper[4863]: I1205 08:30:01.740641 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" event={"ID":"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60","Type":"ContainerDied","Data":"c5f98559c6664b6fa1d9e785ae9cad40fa4c73adcc4438dbcf111dd55ca8e416"} Dec 05 08:30:01 crc kubenswrapper[4863]: I1205 08:30:01.740871 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" event={"ID":"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60","Type":"ContainerStarted","Data":"d2baa1be1edc01fdc84ff645216a5898420515bc719f169840d107df03b37dd0"} Dec 05 08:30:02 crc kubenswrapper[4863]: I1205 08:30:02.034450 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-lht28"] Dec 05 08:30:02 crc kubenswrapper[4863]: I1205 08:30:02.044731 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-lht28"] Dec 05 08:30:02 crc kubenswrapper[4863]: I1205 08:30:02.623675 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21009658-c5e6-4b14-9328-47545509992e" path="/var/lib/kubelet/pods/21009658-c5e6-4b14-9328-47545509992e/volumes" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.198721 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.202215 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-config-volume\") pod \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.202286 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-secret-volume\") pod \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.203353 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-config-volume" (OuterVolumeSpecName: "config-volume") pod "c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" (UID: "c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.211049 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" (UID: "c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.304161 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lph7g\" (UniqueName: \"kubernetes.io/projected/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-kube-api-access-lph7g\") pod \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\" (UID: \"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60\") " Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.304704 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.304722 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.308487 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-kube-api-access-lph7g" (OuterVolumeSpecName: "kube-api-access-lph7g") pod "c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" (UID: "c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60"). InnerVolumeSpecName "kube-api-access-lph7g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.406728 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lph7g\" (UniqueName: \"kubernetes.io/projected/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60-kube-api-access-lph7g\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.765758 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" event={"ID":"c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60","Type":"ContainerDied","Data":"d2baa1be1edc01fdc84ff645216a5898420515bc719f169840d107df03b37dd0"} Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.765794 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2baa1be1edc01fdc84ff645216a5898420515bc719f169840d107df03b37dd0" Dec 05 08:30:03 crc kubenswrapper[4863]: I1205 08:30:03.765856 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz" Dec 05 08:30:04 crc kubenswrapper[4863]: I1205 08:30:04.270189 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd"] Dec 05 08:30:04 crc kubenswrapper[4863]: I1205 08:30:04.279892 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415345-c6hwd"] Dec 05 08:30:04 crc kubenswrapper[4863]: I1205 08:30:04.619018 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bc1e486-4f3a-49e3-bca3-01cf38552df9" path="/var/lib/kubelet/pods/5bc1e486-4f3a-49e3-bca3-01cf38552df9/volumes" Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.464608 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.465403 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.465452 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.466219 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b8e6e476a58c734faea076692d29c4d6d01bd30323dc96489c43608c29369282"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.466271 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://b8e6e476a58c734faea076692d29c4d6d01bd30323dc96489c43608c29369282" gracePeriod=600 Dec 05 08:30:08 
crc kubenswrapper[4863]: I1205 08:30:08.597327 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.656812 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.836114 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="b8e6e476a58c734faea076692d29c4d6d01bd30323dc96489c43608c29369282" exitCode=0 Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.836181 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"b8e6e476a58c734faea076692d29c4d6d01bd30323dc96489c43608c29369282"} Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.836240 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6"} Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.836261 4863 scope.go:117] "RemoveContainer" containerID="c2fd2b1c1b219d819f8f073dcb8688fcb1e4537c8bd8a6b030c79dda92ef7f5c" Dec 05 08:30:08 crc kubenswrapper[4863]: I1205 08:30:08.837668 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-95lq7"] Dec 05 08:30:09 crc kubenswrapper[4863]: I1205 08:30:09.848248 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-95lq7" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="registry-server" containerID="cri-o://79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1" gracePeriod=2 Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.341411 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.466880 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-utilities\") pod \"9a2a6081-4133-449c-b308-769f52196a94\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.467386 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s48x8\" (UniqueName: \"kubernetes.io/projected/9a2a6081-4133-449c-b308-769f52196a94-kube-api-access-s48x8\") pod \"9a2a6081-4133-449c-b308-769f52196a94\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.468006 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-utilities" (OuterVolumeSpecName: "utilities") pod "9a2a6081-4133-449c-b308-769f52196a94" (UID: "9a2a6081-4133-449c-b308-769f52196a94"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.468312 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-catalog-content\") pod \"9a2a6081-4133-449c-b308-769f52196a94\" (UID: \"9a2a6081-4133-449c-b308-769f52196a94\") " Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.469401 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.474923 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a2a6081-4133-449c-b308-769f52196a94-kube-api-access-s48x8" (OuterVolumeSpecName: "kube-api-access-s48x8") pod "9a2a6081-4133-449c-b308-769f52196a94" (UID: "9a2a6081-4133-449c-b308-769f52196a94"). InnerVolumeSpecName "kube-api-access-s48x8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.571707 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s48x8\" (UniqueName: \"kubernetes.io/projected/9a2a6081-4133-449c-b308-769f52196a94-kube-api-access-s48x8\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.588116 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a2a6081-4133-449c-b308-769f52196a94" (UID: "9a2a6081-4133-449c-b308-769f52196a94"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.674887 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a2a6081-4133-449c-b308-769f52196a94-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.860019 4863 generic.go:334] "Generic (PLEG): container finished" podID="9a2a6081-4133-449c-b308-769f52196a94" containerID="79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1" exitCode=0 Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.860075 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95lq7" event={"ID":"9a2a6081-4133-449c-b308-769f52196a94","Type":"ContainerDied","Data":"79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1"} Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.860096 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-95lq7" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.860109 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95lq7" event={"ID":"9a2a6081-4133-449c-b308-769f52196a94","Type":"ContainerDied","Data":"44078d0874a2451c9fd929927594f5046da1931851cc64b9074e3c0f5d1be051"} Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.860130 4863 scope.go:117] "RemoveContainer" containerID="79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.887234 4863 scope.go:117] "RemoveContainer" containerID="b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.892692 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-95lq7"] Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.908319 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-95lq7"] Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.929899 4863 scope.go:117] "RemoveContainer" containerID="032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.962765 4863 scope.go:117] "RemoveContainer" containerID="79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1" Dec 05 08:30:10 crc kubenswrapper[4863]: E1205 08:30:10.963314 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1\": container with ID starting with 79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1 not found: ID does not exist" containerID="79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.963357 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1"} err="failed to get container status \"79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1\": rpc error: code = NotFound desc = could not find container \"79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1\": container with ID starting with 79d478c9974f1939317d5c0b5a3705e843cd4e4a9bd555225a370e766c3b74a1 not found: ID does not exist" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.963382 4863 scope.go:117] "RemoveContainer" containerID="b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe" Dec 05 08:30:10 crc kubenswrapper[4863]: E1205 08:30:10.963658 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe\": container with ID starting with b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe not found: ID does not exist" containerID="b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.963677 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe"} err="failed to get container status \"b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe\": rpc error: code = NotFound desc = could not find container 
\"b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe\": container with ID starting with b30373c093613ac7ff1002c27137285d9cfa93f4040c9d8ec80772c42851d9fe not found: ID does not exist" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.963691 4863 scope.go:117] "RemoveContainer" containerID="032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50" Dec 05 08:30:10 crc kubenswrapper[4863]: E1205 08:30:10.964131 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50\": container with ID starting with 032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50 not found: ID does not exist" containerID="032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50" Dec 05 08:30:10 crc kubenswrapper[4863]: I1205 08:30:10.964156 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50"} err="failed to get container status \"032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50\": rpc error: code = NotFound desc = could not find container \"032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50\": container with ID starting with 032b3a369d84a9166ff846eb2d19292a42169d4f908d64bd52cf5177daf4ab50 not found: ID does not exist" Dec 05 08:30:12 crc kubenswrapper[4863]: I1205 08:30:12.618630 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a2a6081-4133-449c-b308-769f52196a94" path="/var/lib/kubelet/pods/9a2a6081-4133-449c-b308-769f52196a94/volumes" Dec 05 08:30:39 crc kubenswrapper[4863]: I1205 08:30:39.056654 4863 scope.go:117] "RemoveContainer" containerID="dda52595d5274770feb8eb8265baf75afbb9fbae229d59c173604766310f67e8" Dec 05 08:30:39 crc kubenswrapper[4863]: I1205 08:30:39.116041 4863 scope.go:117] "RemoveContainer" containerID="2b3bba5fefc8e577d836cc28b0c136d8064d3f15deb260e9f3ec29293d096bb7" Dec 05 08:30:39 crc kubenswrapper[4863]: I1205 08:30:39.151351 4863 scope.go:117] "RemoveContainer" containerID="db3a1b8262e7ee1cd5b9503d0bdd8e0ced83df5ad3a16f22301b1186b0700e37" Dec 05 08:30:39 crc kubenswrapper[4863]: I1205 08:30:39.199939 4863 scope.go:117] "RemoveContainer" containerID="173a917e746569ccd1a001fb131ba542525262eb3cb4c4b6f85d8d9291c914d0" Dec 05 08:32:08 crc kubenswrapper[4863]: I1205 08:32:08.464667 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:32:08 crc kubenswrapper[4863]: I1205 08:32:08.465272 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:32:10 crc kubenswrapper[4863]: I1205 08:32:10.046674 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-3cac-account-create-update-bgjnk"] Dec 05 08:32:10 crc kubenswrapper[4863]: I1205 08:32:10.060078 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-ssf6l"] Dec 05 08:32:10 crc kubenswrapper[4863]: I1205 08:32:10.069582 4863 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-3cac-account-create-update-bgjnk"] Dec 05 08:32:10 crc kubenswrapper[4863]: I1205 08:32:10.077566 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-ssf6l"] Dec 05 08:32:10 crc kubenswrapper[4863]: I1205 08:32:10.614126 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ec2abf8-0166-4043-b0fa-e660b04dad5a" path="/var/lib/kubelet/pods/3ec2abf8-0166-4043-b0fa-e660b04dad5a/volumes" Dec 05 08:32:10 crc kubenswrapper[4863]: I1205 08:32:10.614891 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b" path="/var/lib/kubelet/pods/dc0ebd98-9d3b-4a7d-8628-12fff50b1d7b/volumes" Dec 05 08:32:22 crc kubenswrapper[4863]: I1205 08:32:22.091357 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-4plh9"] Dec 05 08:32:22 crc kubenswrapper[4863]: I1205 08:32:22.104459 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-4plh9"] Dec 05 08:32:22 crc kubenswrapper[4863]: I1205 08:32:22.612832 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3841bf0a-8d71-42a5-891f-211901e10a31" path="/var/lib/kubelet/pods/3841bf0a-8d71-42a5-891f-211901e10a31/volumes" Dec 05 08:32:38 crc kubenswrapper[4863]: I1205 08:32:38.464659 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:32:38 crc kubenswrapper[4863]: I1205 08:32:38.465287 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:32:39 crc kubenswrapper[4863]: I1205 08:32:39.343781 4863 scope.go:117] "RemoveContainer" containerID="dd7baf5589147826871463ab8226b61e4c0422c79e8d8fd6bd7034afa299387d" Dec 05 08:32:39 crc kubenswrapper[4863]: I1205 08:32:39.370074 4863 scope.go:117] "RemoveContainer" containerID="80f802e9f43c39eee411b3568993c3137e9433c414d1587f5cfe9e21b23fc35f" Dec 05 08:32:39 crc kubenswrapper[4863]: I1205 08:32:39.436212 4863 scope.go:117] "RemoveContainer" containerID="f8dab3e5b8ac369f73c251aea0e0ee0b4a01b521921c4d222324b8caea3ef743" Dec 05 08:32:42 crc kubenswrapper[4863]: I1205 08:32:42.039944 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-6c01-account-create-update-ww5gs"] Dec 05 08:32:42 crc kubenswrapper[4863]: I1205 08:32:42.053051 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-64cnn"] Dec 05 08:32:42 crc kubenswrapper[4863]: I1205 08:32:42.067872 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-64cnn"] Dec 05 08:32:42 crc kubenswrapper[4863]: I1205 08:32:42.075942 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-6c01-account-create-update-ww5gs"] Dec 05 08:32:42 crc kubenswrapper[4863]: I1205 08:32:42.615183 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18b3fb9d-3132-4643-a98f-0aa97954c4a3" path="/var/lib/kubelet/pods/18b3fb9d-3132-4643-a98f-0aa97954c4a3/volumes" Dec 05 08:32:42 crc 
kubenswrapper[4863]: I1205 08:32:42.615861 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="664dfe89-e4fa-4fe2-97d7-187905492583" path="/var/lib/kubelet/pods/664dfe89-e4fa-4fe2-97d7-187905492583/volumes" Dec 05 08:32:54 crc kubenswrapper[4863]: I1205 08:32:54.045553 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-kbgj9"] Dec 05 08:32:54 crc kubenswrapper[4863]: I1205 08:32:54.060295 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-kbgj9"] Dec 05 08:32:54 crc kubenswrapper[4863]: I1205 08:32:54.616788 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="664a0a78-9cd2-4632-9287-b09fac9bae5d" path="/var/lib/kubelet/pods/664a0a78-9cd2-4632-9287-b09fac9bae5d/volumes" Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.464257 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.464986 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.465051 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.466401 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.466600 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" gracePeriod=600 Dec 05 08:33:08 crc kubenswrapper[4863]: E1205 08:33:08.624016 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.645775 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" exitCode=0 Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.645837 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6"} Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.645893 4863 scope.go:117] "RemoveContainer" containerID="b8e6e476a58c734faea076692d29c4d6d01bd30323dc96489c43608c29369282" Dec 05 08:33:08 crc kubenswrapper[4863]: I1205 08:33:08.646771 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:33:08 crc kubenswrapper[4863]: E1205 08:33:08.647107 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:33:22 crc kubenswrapper[4863]: I1205 08:33:22.608312 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:33:22 crc kubenswrapper[4863]: E1205 08:33:22.609210 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:33:34 crc kubenswrapper[4863]: I1205 08:33:34.601700 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:33:34 crc kubenswrapper[4863]: E1205 08:33:34.602603 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:33:39 crc kubenswrapper[4863]: I1205 08:33:39.557113 4863 scope.go:117] "RemoveContainer" containerID="bb4892b646a6577478a336670d2ed45e054f1ccd53f7f9931b5d17e5d4883061" Dec 05 08:33:39 crc kubenswrapper[4863]: I1205 08:33:39.594538 4863 scope.go:117] "RemoveContainer" containerID="f26048754260f1f5216dd44300dcd107aaead2c9d8266cb4483e047b677ae4d8" Dec 05 08:33:39 crc kubenswrapper[4863]: I1205 08:33:39.646696 4863 scope.go:117] "RemoveContainer" containerID="62c4f78fda6c98b1c748c3059677a4cb951368c7b7058ad9d25996ff4342c963" Dec 05 08:33:45 crc kubenswrapper[4863]: I1205 08:33:45.605086 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:33:45 crc kubenswrapper[4863]: E1205 08:33:45.606988 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" 
Dec 05 08:33:57 crc kubenswrapper[4863]: I1205 08:33:57.602265 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:33:57 crc kubenswrapper[4863]: E1205 08:33:57.603441 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:34:09 crc kubenswrapper[4863]: I1205 08:34:09.602242 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:34:09 crc kubenswrapper[4863]: E1205 08:34:09.603197 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:34:20 crc kubenswrapper[4863]: I1205 08:34:20.602172 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:34:20 crc kubenswrapper[4863]: E1205 08:34:20.604281 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:34:31 crc kubenswrapper[4863]: I1205 08:34:31.602642 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:34:31 crc kubenswrapper[4863]: E1205 08:34:31.603358 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:34:42 crc kubenswrapper[4863]: I1205 08:34:42.608796 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:34:42 crc kubenswrapper[4863]: E1205 08:34:42.609665 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:34:45 crc kubenswrapper[4863]: I1205 08:34:45.618633 4863 generic.go:334] "Generic (PLEG): container finished" podID="117a0ebe-6023-41c5-b416-f583e46b4ce4" 
containerID="e1a7adacc2d0894b8caecc503482cd4378cbd2a8264bf3b0302e6d1b21a614a2" exitCode=0 Dec 05 08:34:45 crc kubenswrapper[4863]: I1205 08:34:45.618751 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" event={"ID":"117a0ebe-6023-41c5-b416-f583e46b4ce4","Type":"ContainerDied","Data":"e1a7adacc2d0894b8caecc503482cd4378cbd2a8264bf3b0302e6d1b21a614a2"} Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.087358 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.209272 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-inventory\") pod \"117a0ebe-6023-41c5-b416-f583e46b4ce4\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.209405 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ceph\") pod \"117a0ebe-6023-41c5-b416-f583e46b4ce4\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.209438 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-tripleo-cleanup-combined-ca-bundle\") pod \"117a0ebe-6023-41c5-b416-f583e46b4ce4\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.209601 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ssh-key\") pod \"117a0ebe-6023-41c5-b416-f583e46b4ce4\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.209660 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzcvd\" (UniqueName: \"kubernetes.io/projected/117a0ebe-6023-41c5-b416-f583e46b4ce4-kube-api-access-kzcvd\") pod \"117a0ebe-6023-41c5-b416-f583e46b4ce4\" (UID: \"117a0ebe-6023-41c5-b416-f583e46b4ce4\") " Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.214650 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ceph" (OuterVolumeSpecName: "ceph") pod "117a0ebe-6023-41c5-b416-f583e46b4ce4" (UID: "117a0ebe-6023-41c5-b416-f583e46b4ce4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.214909 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/117a0ebe-6023-41c5-b416-f583e46b4ce4-kube-api-access-kzcvd" (OuterVolumeSpecName: "kube-api-access-kzcvd") pod "117a0ebe-6023-41c5-b416-f583e46b4ce4" (UID: "117a0ebe-6023-41c5-b416-f583e46b4ce4"). InnerVolumeSpecName "kube-api-access-kzcvd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.214985 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "117a0ebe-6023-41c5-b416-f583e46b4ce4" (UID: "117a0ebe-6023-41c5-b416-f583e46b4ce4"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.237107 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-inventory" (OuterVolumeSpecName: "inventory") pod "117a0ebe-6023-41c5-b416-f583e46b4ce4" (UID: "117a0ebe-6023-41c5-b416-f583e46b4ce4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.237797 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "117a0ebe-6023-41c5-b416-f583e46b4ce4" (UID: "117a0ebe-6023-41c5-b416-f583e46b4ce4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.313425 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.313506 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzcvd\" (UniqueName: \"kubernetes.io/projected/117a0ebe-6023-41c5-b416-f583e46b4ce4-kube-api-access-kzcvd\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.313535 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.313557 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.313580 4863 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117a0ebe-6023-41c5-b416-f583e46b4ce4-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.638197 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" event={"ID":"117a0ebe-6023-41c5-b416-f583e46b4ce4","Type":"ContainerDied","Data":"176bc8ad22619e2c72feab88e44727e748e32c9da464ef0118183049af7a1109"} Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.638243 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="176bc8ad22619e2c72feab88e44727e748e32c9da464ef0118183049af7a1109" Dec 05 08:34:47 crc kubenswrapper[4863]: I1205 08:34:47.638302 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.802044 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dsm2n"] Dec 05 08:34:48 crc kubenswrapper[4863]: E1205 08:34:48.802792 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="117a0ebe-6023-41c5-b416-f583e46b4ce4" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.802810 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="117a0ebe-6023-41c5-b416-f583e46b4ce4" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Dec 05 08:34:48 crc kubenswrapper[4863]: E1205 08:34:48.802834 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="registry-server" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.802840 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="registry-server" Dec 05 08:34:48 crc kubenswrapper[4863]: E1205 08:34:48.802854 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="extract-content" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.802860 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="extract-content" Dec 05 08:34:48 crc kubenswrapper[4863]: E1205 08:34:48.802873 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" containerName="collect-profiles" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.802878 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" containerName="collect-profiles" Dec 05 08:34:48 crc kubenswrapper[4863]: E1205 08:34:48.802888 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="extract-utilities" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.802894 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="extract-utilities" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.803076 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" containerName="collect-profiles" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.803093 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a2a6081-4133-449c-b308-769f52196a94" containerName="registry-server" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.803117 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="117a0ebe-6023-41c5-b416-f583e46b4ce4" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.804714 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.826407 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsm2n"] Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.947008 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f8bs\" (UniqueName: \"kubernetes.io/projected/606fcc66-1682-48d4-b557-38a61e0ef505-kube-api-access-7f8bs\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.947061 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-catalog-content\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:48 crc kubenswrapper[4863]: I1205 08:34:48.947367 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-utilities\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.049028 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f8bs\" (UniqueName: \"kubernetes.io/projected/606fcc66-1682-48d4-b557-38a61e0ef505-kube-api-access-7f8bs\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.049084 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-catalog-content\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.049215 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-utilities\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.049821 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-utilities\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.049818 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-catalog-content\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.071320 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7f8bs\" (UniqueName: \"kubernetes.io/projected/606fcc66-1682-48d4-b557-38a61e0ef505-kube-api-access-7f8bs\") pod \"community-operators-dsm2n\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.136143 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:49 crc kubenswrapper[4863]: I1205 08:34:49.864493 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsm2n"] Dec 05 08:34:50 crc kubenswrapper[4863]: I1205 08:34:50.666814 4863 generic.go:334] "Generic (PLEG): container finished" podID="606fcc66-1682-48d4-b557-38a61e0ef505" containerID="f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5" exitCode=0 Dec 05 08:34:50 crc kubenswrapper[4863]: I1205 08:34:50.667161 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsm2n" event={"ID":"606fcc66-1682-48d4-b557-38a61e0ef505","Type":"ContainerDied","Data":"f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5"} Dec 05 08:34:50 crc kubenswrapper[4863]: I1205 08:34:50.667194 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsm2n" event={"ID":"606fcc66-1682-48d4-b557-38a61e0ef505","Type":"ContainerStarted","Data":"2c5090ed69e8b4347402000c3d23d2a755b99eb7304611e6197d8490cbc33831"} Dec 05 08:34:50 crc kubenswrapper[4863]: I1205 08:34:50.669006 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:34:51 crc kubenswrapper[4863]: I1205 08:34:51.678986 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsm2n" event={"ID":"606fcc66-1682-48d4-b557-38a61e0ef505","Type":"ContainerStarted","Data":"2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b"} Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.692340 4863 generic.go:334] "Generic (PLEG): container finished" podID="606fcc66-1682-48d4-b557-38a61e0ef505" containerID="2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b" exitCode=0 Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.692426 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsm2n" event={"ID":"606fcc66-1682-48d4-b557-38a61e0ef505","Type":"ContainerDied","Data":"2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b"} Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.951673 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-hktm5"] Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.954078 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.963984 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.964070 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.964183 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.966384 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-hktm5"] Dec 05 08:34:52 crc kubenswrapper[4863]: I1205 08:34:52.969020 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.036179 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.036433 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.036508 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ceph\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.036617 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-inventory\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.036726 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwdng\" (UniqueName: \"kubernetes.io/projected/f1986f6c-07f7-4f07-8fac-a10054e34670-kube-api-access-gwdng\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.138381 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwdng\" (UniqueName: \"kubernetes.io/projected/f1986f6c-07f7-4f07-8fac-a10054e34670-kube-api-access-gwdng\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 
08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.138517 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.138572 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.138599 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ceph\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.138647 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-inventory\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.146274 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ceph\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.146636 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-inventory\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.147850 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.148818 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.156855 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwdng\" (UniqueName: \"kubernetes.io/projected/f1986f6c-07f7-4f07-8fac-a10054e34670-kube-api-access-gwdng\") pod \"bootstrap-openstack-openstack-cell1-hktm5\" (UID: 
\"f1986f6c-07f7-4f07-8fac-a10054e34670\") " pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.280466 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.709337 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsm2n" event={"ID":"606fcc66-1682-48d4-b557-38a61e0ef505","Type":"ContainerStarted","Data":"d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547"} Dec 05 08:34:53 crc kubenswrapper[4863]: I1205 08:34:53.735905 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dsm2n" podStartSLOduration=3.335265784 podStartE2EDuration="5.735885691s" podCreationTimestamp="2025-12-05 08:34:48 +0000 UTC" firstStartedPulling="2025-12-05 08:34:50.668810254 +0000 UTC m=+6518.394807294" lastFinishedPulling="2025-12-05 08:34:53.069430161 +0000 UTC m=+6520.795427201" observedRunningTime="2025-12-05 08:34:53.729114356 +0000 UTC m=+6521.455111406" watchObservedRunningTime="2025-12-05 08:34:53.735885691 +0000 UTC m=+6521.461882731" Dec 05 08:34:54 crc kubenswrapper[4863]: I1205 08:34:53.858381 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-hktm5"] Dec 05 08:34:54 crc kubenswrapper[4863]: W1205 08:34:53.868092 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1986f6c_07f7_4f07_8fac_a10054e34670.slice/crio-b19e9158674249335f781729d2aa123bf67eee61a2463e2625360a20291f6592 WatchSource:0}: Error finding container b19e9158674249335f781729d2aa123bf67eee61a2463e2625360a20291f6592: Status 404 returned error can't find the container with id b19e9158674249335f781729d2aa123bf67eee61a2463e2625360a20291f6592 Dec 05 08:34:54 crc kubenswrapper[4863]: I1205 08:34:54.750585 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" event={"ID":"f1986f6c-07f7-4f07-8fac-a10054e34670","Type":"ContainerStarted","Data":"8d630841a8b2ee6a96c3e14a28b6058fbfe664a3efe0d22bba6f90738b75ab7f"} Dec 05 08:34:54 crc kubenswrapper[4863]: I1205 08:34:54.750891 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" event={"ID":"f1986f6c-07f7-4f07-8fac-a10054e34670","Type":"ContainerStarted","Data":"b19e9158674249335f781729d2aa123bf67eee61a2463e2625360a20291f6592"} Dec 05 08:34:54 crc kubenswrapper[4863]: I1205 08:34:54.804086 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" podStartSLOduration=2.285183374 podStartE2EDuration="2.804055637s" podCreationTimestamp="2025-12-05 08:34:52 +0000 UTC" firstStartedPulling="2025-12-05 08:34:53.871898227 +0000 UTC m=+6521.597895267" lastFinishedPulling="2025-12-05 08:34:54.39077048 +0000 UTC m=+6522.116767530" observedRunningTime="2025-12-05 08:34:54.796347669 +0000 UTC m=+6522.522344719" watchObservedRunningTime="2025-12-05 08:34:54.804055637 +0000 UTC m=+6522.530052677" Dec 05 08:34:56 crc kubenswrapper[4863]: I1205 08:34:56.606160 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:34:56 crc kubenswrapper[4863]: E1205 08:34:56.606763 4863 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:34:59 crc kubenswrapper[4863]: I1205 08:34:59.137226 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:59 crc kubenswrapper[4863]: I1205 08:34:59.138405 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:59 crc kubenswrapper[4863]: I1205 08:34:59.188277 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:59 crc kubenswrapper[4863]: I1205 08:34:59.848508 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:34:59 crc kubenswrapper[4863]: I1205 08:34:59.908213 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dsm2n"] Dec 05 08:35:01 crc kubenswrapper[4863]: I1205 08:35:01.813999 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dsm2n" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="registry-server" containerID="cri-o://d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547" gracePeriod=2 Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.352173 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.471346 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f8bs\" (UniqueName: \"kubernetes.io/projected/606fcc66-1682-48d4-b557-38a61e0ef505-kube-api-access-7f8bs\") pod \"606fcc66-1682-48d4-b557-38a61e0ef505\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.471438 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-utilities\") pod \"606fcc66-1682-48d4-b557-38a61e0ef505\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.471681 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-catalog-content\") pod \"606fcc66-1682-48d4-b557-38a61e0ef505\" (UID: \"606fcc66-1682-48d4-b557-38a61e0ef505\") " Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.472318 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-utilities" (OuterVolumeSpecName: "utilities") pod "606fcc66-1682-48d4-b557-38a61e0ef505" (UID: "606fcc66-1682-48d4-b557-38a61e0ef505"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.479729 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/606fcc66-1682-48d4-b557-38a61e0ef505-kube-api-access-7f8bs" (OuterVolumeSpecName: "kube-api-access-7f8bs") pod "606fcc66-1682-48d4-b557-38a61e0ef505" (UID: "606fcc66-1682-48d4-b557-38a61e0ef505"). InnerVolumeSpecName "kube-api-access-7f8bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.525501 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "606fcc66-1682-48d4-b557-38a61e0ef505" (UID: "606fcc66-1682-48d4-b557-38a61e0ef505"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.574417 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.574463 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f8bs\" (UniqueName: \"kubernetes.io/projected/606fcc66-1682-48d4-b557-38a61e0ef505-kube-api-access-7f8bs\") on node \"crc\" DevicePath \"\"" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.574552 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/606fcc66-1682-48d4-b557-38a61e0ef505-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.824532 4863 generic.go:334] "Generic (PLEG): container finished" podID="606fcc66-1682-48d4-b557-38a61e0ef505" containerID="d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547" exitCode=0 Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.824568 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dsm2n" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.824589 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsm2n" event={"ID":"606fcc66-1682-48d4-b557-38a61e0ef505","Type":"ContainerDied","Data":"d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547"} Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.825862 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsm2n" event={"ID":"606fcc66-1682-48d4-b557-38a61e0ef505","Type":"ContainerDied","Data":"2c5090ed69e8b4347402000c3d23d2a755b99eb7304611e6197d8490cbc33831"} Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.825900 4863 scope.go:117] "RemoveContainer" containerID="d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.852927 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dsm2n"] Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.854510 4863 scope.go:117] "RemoveContainer" containerID="2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.866223 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dsm2n"] Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.876269 4863 scope.go:117] "RemoveContainer" containerID="f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.931613 4863 scope.go:117] "RemoveContainer" containerID="d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547" Dec 05 08:35:02 crc kubenswrapper[4863]: E1205 08:35:02.932152 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547\": container with ID starting with d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547 not found: ID does not exist" containerID="d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.932202 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547"} err="failed to get container status \"d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547\": rpc error: code = NotFound desc = could not find container \"d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547\": container with ID starting with d71bbfef8ff68af3ab45b0e43f8130591a41d5b18ea3ab398e8c9ac1f9dd6547 not found: ID does not exist" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.932229 4863 scope.go:117] "RemoveContainer" containerID="2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b" Dec 05 08:35:02 crc kubenswrapper[4863]: E1205 08:35:02.932744 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b\": container with ID starting with 2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b not found: ID does not exist" containerID="2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.932775 4863 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b"} err="failed to get container status \"2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b\": rpc error: code = NotFound desc = could not find container \"2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b\": container with ID starting with 2a574f66bb985615532910067506659e01015fbe6cb8c5269b55e1955cf5a08b not found: ID does not exist" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.932793 4863 scope.go:117] "RemoveContainer" containerID="f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5" Dec 05 08:35:02 crc kubenswrapper[4863]: E1205 08:35:02.933205 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5\": container with ID starting with f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5 not found: ID does not exist" containerID="f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5" Dec 05 08:35:02 crc kubenswrapper[4863]: I1205 08:35:02.933251 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5"} err="failed to get container status \"f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5\": rpc error: code = NotFound desc = could not find container \"f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5\": container with ID starting with f93c23f6e2d006f3d18e8cb5eaa7516184599b87715c00d2d4e80c12bb5f2cd5 not found: ID does not exist" Dec 05 08:35:04 crc kubenswrapper[4863]: I1205 08:35:04.612942 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" path="/var/lib/kubelet/pods/606fcc66-1682-48d4-b557-38a61e0ef505/volumes" Dec 05 08:35:08 crc kubenswrapper[4863]: I1205 08:35:08.603125 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:35:08 crc kubenswrapper[4863]: E1205 08:35:08.603803 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:35:23 crc kubenswrapper[4863]: I1205 08:35:23.602287 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:35:23 crc kubenswrapper[4863]: E1205 08:35:23.603187 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:35:37 crc kubenswrapper[4863]: I1205 08:35:37.602021 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:35:37 crc 
kubenswrapper[4863]: E1205 08:35:37.603060 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:35:51 crc kubenswrapper[4863]: I1205 08:35:51.602368 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:35:51 crc kubenswrapper[4863]: E1205 08:35:51.603114 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:36:05 crc kubenswrapper[4863]: I1205 08:36:05.602159 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:36:05 crc kubenswrapper[4863]: E1205 08:36:05.603074 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:36:18 crc kubenswrapper[4863]: I1205 08:36:18.602522 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:36:18 crc kubenswrapper[4863]: E1205 08:36:18.603153 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:36:29 crc kubenswrapper[4863]: I1205 08:36:29.602340 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:36:29 crc kubenswrapper[4863]: E1205 08:36:29.603244 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:36:44 crc kubenswrapper[4863]: I1205 08:36:44.602816 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:36:44 crc kubenswrapper[4863]: E1205 08:36:44.604237 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:36:59 crc kubenswrapper[4863]: I1205 08:36:59.601827 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:36:59 crc kubenswrapper[4863]: E1205 08:36:59.602620 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:37:12 crc kubenswrapper[4863]: I1205 08:37:12.613232 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:37:12 crc kubenswrapper[4863]: E1205 08:37:12.614818 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:37:25 crc kubenswrapper[4863]: I1205 08:37:25.602906 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:37:25 crc kubenswrapper[4863]: E1205 08:37:25.603713 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:37:37 crc kubenswrapper[4863]: I1205 08:37:37.602860 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:37:37 crc kubenswrapper[4863]: E1205 08:37:37.603659 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:37:51 crc kubenswrapper[4863]: I1205 08:37:51.602234 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:37:51 crc kubenswrapper[4863]: E1205 08:37:51.603030 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:38:05 crc kubenswrapper[4863]: I1205 08:38:05.657666 4863 generic.go:334] "Generic (PLEG): container finished" podID="f1986f6c-07f7-4f07-8fac-a10054e34670" containerID="8d630841a8b2ee6a96c3e14a28b6058fbfe664a3efe0d22bba6f90738b75ab7f" exitCode=0 Dec 05 08:38:05 crc kubenswrapper[4863]: I1205 08:38:05.657877 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" event={"ID":"f1986f6c-07f7-4f07-8fac-a10054e34670","Type":"ContainerDied","Data":"8d630841a8b2ee6a96c3e14a28b6058fbfe664a3efe0d22bba6f90738b75ab7f"} Dec 05 08:38:06 crc kubenswrapper[4863]: I1205 08:38:06.602194 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:38:06 crc kubenswrapper[4863]: E1205 08:38:06.603193 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.234546 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.324051 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ceph\") pod \"f1986f6c-07f7-4f07-8fac-a10054e34670\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.324381 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-bootstrap-combined-ca-bundle\") pod \"f1986f6c-07f7-4f07-8fac-a10054e34670\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.324453 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ssh-key\") pod \"f1986f6c-07f7-4f07-8fac-a10054e34670\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.324742 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-inventory\") pod \"f1986f6c-07f7-4f07-8fac-a10054e34670\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.325360 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwdng\" (UniqueName: \"kubernetes.io/projected/f1986f6c-07f7-4f07-8fac-a10054e34670-kube-api-access-gwdng\") pod \"f1986f6c-07f7-4f07-8fac-a10054e34670\" (UID: \"f1986f6c-07f7-4f07-8fac-a10054e34670\") " Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.330329 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ceph" (OuterVolumeSpecName: 
"ceph") pod "f1986f6c-07f7-4f07-8fac-a10054e34670" (UID: "f1986f6c-07f7-4f07-8fac-a10054e34670"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.330349 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f1986f6c-07f7-4f07-8fac-a10054e34670" (UID: "f1986f6c-07f7-4f07-8fac-a10054e34670"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.331219 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1986f6c-07f7-4f07-8fac-a10054e34670-kube-api-access-gwdng" (OuterVolumeSpecName: "kube-api-access-gwdng") pod "f1986f6c-07f7-4f07-8fac-a10054e34670" (UID: "f1986f6c-07f7-4f07-8fac-a10054e34670"). InnerVolumeSpecName "kube-api-access-gwdng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.353937 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f1986f6c-07f7-4f07-8fac-a10054e34670" (UID: "f1986f6c-07f7-4f07-8fac-a10054e34670"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.355681 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-inventory" (OuterVolumeSpecName: "inventory") pod "f1986f6c-07f7-4f07-8fac-a10054e34670" (UID: "f1986f6c-07f7-4f07-8fac-a10054e34670"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.429068 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.429122 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwdng\" (UniqueName: \"kubernetes.io/projected/f1986f6c-07f7-4f07-8fac-a10054e34670-kube-api-access-gwdng\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.429135 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.429148 4863 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.429159 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1986f6c-07f7-4f07-8fac-a10054e34670-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.681014 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" event={"ID":"f1986f6c-07f7-4f07-8fac-a10054e34670","Type":"ContainerDied","Data":"b19e9158674249335f781729d2aa123bf67eee61a2463e2625360a20291f6592"} Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.681076 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b19e9158674249335f781729d2aa123bf67eee61a2463e2625360a20291f6592" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.681123 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-hktm5" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.770650 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-4k99f"] Dec 05 08:38:07 crc kubenswrapper[4863]: E1205 08:38:07.771117 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="extract-utilities" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.771146 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="extract-utilities" Dec 05 08:38:07 crc kubenswrapper[4863]: E1205 08:38:07.771169 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="extract-content" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.771177 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="extract-content" Dec 05 08:38:07 crc kubenswrapper[4863]: E1205 08:38:07.771194 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1986f6c-07f7-4f07-8fac-a10054e34670" containerName="bootstrap-openstack-openstack-cell1" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.771200 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1986f6c-07f7-4f07-8fac-a10054e34670" containerName="bootstrap-openstack-openstack-cell1" Dec 05 08:38:07 crc kubenswrapper[4863]: E1205 08:38:07.771217 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="registry-server" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.771223 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="registry-server" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.771398 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="606fcc66-1682-48d4-b557-38a61e0ef505" containerName="registry-server" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.771433 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1986f6c-07f7-4f07-8fac-a10054e34670" containerName="bootstrap-openstack-openstack-cell1" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.772278 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.777176 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.777378 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.779641 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.780497 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.781581 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-4k99f"] Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.938461 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ceph\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.938972 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-inventory\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.939212 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ssh-key\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:07 crc kubenswrapper[4863]: I1205 08:38:07.939560 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8nq7\" (UniqueName: \"kubernetes.io/projected/61304558-f044-4fd0-82f7-d2533e210fc2-kube-api-access-c8nq7\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.041379 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ssh-key\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.041487 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8nq7\" (UniqueName: \"kubernetes.io/projected/61304558-f044-4fd0-82f7-d2533e210fc2-kube-api-access-c8nq7\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 
05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.041537 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ceph\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.041578 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-inventory\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.045357 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ssh-key\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.045971 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-inventory\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.046245 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ceph\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.060559 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8nq7\" (UniqueName: \"kubernetes.io/projected/61304558-f044-4fd0-82f7-d2533e210fc2-kube-api-access-c8nq7\") pod \"download-cache-openstack-openstack-cell1-4k99f\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.093842 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.669600 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-4k99f"] Dec 05 08:38:08 crc kubenswrapper[4863]: I1205 08:38:08.691088 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" event={"ID":"61304558-f044-4fd0-82f7-d2533e210fc2","Type":"ContainerStarted","Data":"38c740fc353acf6f4a6655fca2d7b627752f1d46ba6d2d6b8520b9f6056dd6aa"} Dec 05 08:38:09 crc kubenswrapper[4863]: I1205 08:38:09.705176 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" event={"ID":"61304558-f044-4fd0-82f7-d2533e210fc2","Type":"ContainerStarted","Data":"3476bc0865709aecf17715ebbdee4cac9b17e9d89eabcd1936baf13e25684716"} Dec 05 08:38:09 crc kubenswrapper[4863]: I1205 08:38:09.730942 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" podStartSLOduration=2.241306723 podStartE2EDuration="2.730922314s" podCreationTimestamp="2025-12-05 08:38:07 +0000 UTC" firstStartedPulling="2025-12-05 08:38:08.667559756 +0000 UTC m=+6716.393556796" lastFinishedPulling="2025-12-05 08:38:09.157175337 +0000 UTC m=+6716.883172387" observedRunningTime="2025-12-05 08:38:09.726326483 +0000 UTC m=+6717.452323533" watchObservedRunningTime="2025-12-05 08:38:09.730922314 +0000 UTC m=+6717.456919354" Dec 05 08:38:21 crc kubenswrapper[4863]: I1205 08:38:21.601514 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:38:22 crc kubenswrapper[4863]: I1205 08:38:22.852030 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"03454eac9d11da1cd83a42f4459000476696e0445a5619adcb508c0c6e1b7ba9"} Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.499416 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jrqll"] Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.503611 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.510231 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jrqll"] Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.640205 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bk7d\" (UniqueName: \"kubernetes.io/projected/38d25020-1ccd-4553-a6e0-959986c494aa-kube-api-access-9bk7d\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.640275 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38d25020-1ccd-4553-a6e0-959986c494aa-catalog-content\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.640489 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38d25020-1ccd-4553-a6e0-959986c494aa-utilities\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.742735 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38d25020-1ccd-4553-a6e0-959986c494aa-utilities\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.742825 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bk7d\" (UniqueName: \"kubernetes.io/projected/38d25020-1ccd-4553-a6e0-959986c494aa-kube-api-access-9bk7d\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.742858 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38d25020-1ccd-4553-a6e0-959986c494aa-catalog-content\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.743461 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38d25020-1ccd-4553-a6e0-959986c494aa-utilities\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.743589 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38d25020-1ccd-4553-a6e0-959986c494aa-catalog-content\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.771028 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9bk7d\" (UniqueName: \"kubernetes.io/projected/38d25020-1ccd-4553-a6e0-959986c494aa-kube-api-access-9bk7d\") pod \"certified-operators-jrqll\" (UID: \"38d25020-1ccd-4553-a6e0-959986c494aa\") " pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:25 crc kubenswrapper[4863]: I1205 08:39:25.823412 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:26 crc kubenswrapper[4863]: I1205 08:39:26.314297 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jrqll"] Dec 05 08:39:26 crc kubenswrapper[4863]: I1205 08:39:26.489172 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jrqll" event={"ID":"38d25020-1ccd-4553-a6e0-959986c494aa","Type":"ContainerStarted","Data":"b9382231e54548d08881c2035473731f6f5d813f57374041822448c82cbe1ed7"} Dec 05 08:39:27 crc kubenswrapper[4863]: I1205 08:39:27.503682 4863 generic.go:334] "Generic (PLEG): container finished" podID="38d25020-1ccd-4553-a6e0-959986c494aa" containerID="431ecf891e082fe7914fb6f6e76f830b2cd824076e4cd04668fc1dfa1a608cd6" exitCode=0 Dec 05 08:39:27 crc kubenswrapper[4863]: I1205 08:39:27.503751 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jrqll" event={"ID":"38d25020-1ccd-4553-a6e0-959986c494aa","Type":"ContainerDied","Data":"431ecf891e082fe7914fb6f6e76f830b2cd824076e4cd04668fc1dfa1a608cd6"} Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.529172 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kdcbp"] Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.531590 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.543081 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdcbp"] Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.708023 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-utilities\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.708388 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7qg5\" (UniqueName: \"kubernetes.io/projected/64ee22d2-d2b7-482d-b396-5bb79822b3a5-kube-api-access-q7qg5\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.708721 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-catalog-content\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.810683 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-utilities\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.810744 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7qg5\" (UniqueName: \"kubernetes.io/projected/64ee22d2-d2b7-482d-b396-5bb79822b3a5-kube-api-access-q7qg5\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.810921 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-catalog-content\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.811550 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-catalog-content\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.811825 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-utilities\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.844352 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-q7qg5\" (UniqueName: \"kubernetes.io/projected/64ee22d2-d2b7-482d-b396-5bb79822b3a5-kube-api-access-q7qg5\") pod \"redhat-marketplace-kdcbp\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:28 crc kubenswrapper[4863]: I1205 08:39:28.859298 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:29 crc kubenswrapper[4863]: I1205 08:39:29.351814 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdcbp"] Dec 05 08:39:29 crc kubenswrapper[4863]: I1205 08:39:29.520373 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdcbp" event={"ID":"64ee22d2-d2b7-482d-b396-5bb79822b3a5","Type":"ContainerStarted","Data":"eb5aaf0c520a7cf1e36b713d63aea44584a7540156190e9d8f6370903c3f94eb"} Dec 05 08:39:30 crc kubenswrapper[4863]: I1205 08:39:30.530340 4863 generic.go:334] "Generic (PLEG): container finished" podID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerID="1b220fc35f757d53a99f8287d8255a341dcd0bccf8bf9f470620057e05889c8f" exitCode=0 Dec 05 08:39:30 crc kubenswrapper[4863]: I1205 08:39:30.530378 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdcbp" event={"ID":"64ee22d2-d2b7-482d-b396-5bb79822b3a5","Type":"ContainerDied","Data":"1b220fc35f757d53a99f8287d8255a341dcd0bccf8bf9f470620057e05889c8f"} Dec 05 08:39:34 crc kubenswrapper[4863]: E1205 08:39:34.358501 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38d25020_1ccd_4553_a6e0_959986c494aa.slice/crio-14cb1e03385ce99f50c2a741b0a3530e65d233767ce25134ad6b16951ffbc429.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38d25020_1ccd_4553_a6e0_959986c494aa.slice/crio-conmon-14cb1e03385ce99f50c2a741b0a3530e65d233767ce25134ad6b16951ffbc429.scope\": RecentStats: unable to find data in memory cache]" Dec 05 08:39:34 crc kubenswrapper[4863]: I1205 08:39:34.576389 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdcbp" event={"ID":"64ee22d2-d2b7-482d-b396-5bb79822b3a5","Type":"ContainerStarted","Data":"ca76bffcfd233af33df5d8e129afbb03a1acf1d6cde03e66b76f92eeacf24192"} Dec 05 08:39:34 crc kubenswrapper[4863]: I1205 08:39:34.579577 4863 generic.go:334] "Generic (PLEG): container finished" podID="38d25020-1ccd-4553-a6e0-959986c494aa" containerID="14cb1e03385ce99f50c2a741b0a3530e65d233767ce25134ad6b16951ffbc429" exitCode=0 Dec 05 08:39:34 crc kubenswrapper[4863]: I1205 08:39:34.579655 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jrqll" event={"ID":"38d25020-1ccd-4553-a6e0-959986c494aa","Type":"ContainerDied","Data":"14cb1e03385ce99f50c2a741b0a3530e65d233767ce25134ad6b16951ffbc429"} Dec 05 08:39:35 crc kubenswrapper[4863]: I1205 08:39:35.594730 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jrqll" event={"ID":"38d25020-1ccd-4553-a6e0-959986c494aa","Type":"ContainerStarted","Data":"cf7dfe632c15ad76bc0a607e07de42ec89af27b5a48af97384f772c827275fc0"} Dec 05 08:39:35 crc kubenswrapper[4863]: I1205 08:39:35.597134 4863 generic.go:334] "Generic (PLEG): container finished" 
podID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerID="ca76bffcfd233af33df5d8e129afbb03a1acf1d6cde03e66b76f92eeacf24192" exitCode=0 Dec 05 08:39:35 crc kubenswrapper[4863]: I1205 08:39:35.597200 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdcbp" event={"ID":"64ee22d2-d2b7-482d-b396-5bb79822b3a5","Type":"ContainerDied","Data":"ca76bffcfd233af33df5d8e129afbb03a1acf1d6cde03e66b76f92eeacf24192"} Dec 05 08:39:35 crc kubenswrapper[4863]: I1205 08:39:35.623309 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jrqll" podStartSLOduration=3.098297159 podStartE2EDuration="10.623280301s" podCreationTimestamp="2025-12-05 08:39:25 +0000 UTC" firstStartedPulling="2025-12-05 08:39:27.50638315 +0000 UTC m=+6795.232380200" lastFinishedPulling="2025-12-05 08:39:35.031366302 +0000 UTC m=+6802.757363342" observedRunningTime="2025-12-05 08:39:35.619783125 +0000 UTC m=+6803.345780195" watchObservedRunningTime="2025-12-05 08:39:35.623280301 +0000 UTC m=+6803.349277351" Dec 05 08:39:35 crc kubenswrapper[4863]: I1205 08:39:35.824734 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:35 crc kubenswrapper[4863]: I1205 08:39:35.824790 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:36 crc kubenswrapper[4863]: I1205 08:39:36.885371 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jrqll" podUID="38d25020-1ccd-4553-a6e0-959986c494aa" containerName="registry-server" probeResult="failure" output=< Dec 05 08:39:36 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 08:39:36 crc kubenswrapper[4863]: > Dec 05 08:39:37 crc kubenswrapper[4863]: I1205 08:39:37.628616 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdcbp" event={"ID":"64ee22d2-d2b7-482d-b396-5bb79822b3a5","Type":"ContainerStarted","Data":"d426435c74d16f9bc3917612435e0eb0b0b50ced5f7a03f087c75e4b8b856d0a"} Dec 05 08:39:37 crc kubenswrapper[4863]: I1205 08:39:37.657092 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kdcbp" podStartSLOduration=6.792913546 podStartE2EDuration="9.657066089s" podCreationTimestamp="2025-12-05 08:39:28 +0000 UTC" firstStartedPulling="2025-12-05 08:39:33.642503641 +0000 UTC m=+6801.368500681" lastFinishedPulling="2025-12-05 08:39:36.506656184 +0000 UTC m=+6804.232653224" observedRunningTime="2025-12-05 08:39:37.652111399 +0000 UTC m=+6805.378108449" watchObservedRunningTime="2025-12-05 08:39:37.657066089 +0000 UTC m=+6805.383063129" Dec 05 08:39:38 crc kubenswrapper[4863]: I1205 08:39:38.860073 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:38 crc kubenswrapper[4863]: I1205 08:39:38.860389 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:38 crc kubenswrapper[4863]: I1205 08:39:38.911631 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:45 crc kubenswrapper[4863]: I1205 08:39:45.871402 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:45 crc kubenswrapper[4863]: I1205 08:39:45.928119 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jrqll" Dec 05 08:39:46 crc kubenswrapper[4863]: I1205 08:39:46.224977 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jrqll"] Dec 05 08:39:46 crc kubenswrapper[4863]: I1205 08:39:46.386853 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qpw49"] Dec 05 08:39:46 crc kubenswrapper[4863]: I1205 08:39:46.387440 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qpw49" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="registry-server" containerID="cri-o://39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028" gracePeriod=2 Dec 05 08:39:46 crc kubenswrapper[4863]: I1205 08:39:46.729462 4863 generic.go:334] "Generic (PLEG): container finished" podID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerID="39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028" exitCode=0 Dec 05 08:39:46 crc kubenswrapper[4863]: I1205 08:39:46.729859 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpw49" event={"ID":"f94c0747-2c1f-40fd-941d-e714db6709d5","Type":"ContainerDied","Data":"39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028"} Dec 05 08:39:46 crc kubenswrapper[4863]: E1205 08:39:46.759658 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028 is running failed: container process not found" containerID="39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 08:39:46 crc kubenswrapper[4863]: E1205 08:39:46.763573 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028 is running failed: container process not found" containerID="39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 08:39:46 crc kubenswrapper[4863]: E1205 08:39:46.764155 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028 is running failed: container process not found" containerID="39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 08:39:46 crc kubenswrapper[4863]: E1205 08:39:46.764211 4863 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-qpw49" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="registry-server" Dec 05 08:39:46 crc kubenswrapper[4863]: I1205 08:39:46.902531 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.031230 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-catalog-content\") pod \"f94c0747-2c1f-40fd-941d-e714db6709d5\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.031750 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcz5x\" (UniqueName: \"kubernetes.io/projected/f94c0747-2c1f-40fd-941d-e714db6709d5-kube-api-access-dcz5x\") pod \"f94c0747-2c1f-40fd-941d-e714db6709d5\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.031871 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-utilities\") pod \"f94c0747-2c1f-40fd-941d-e714db6709d5\" (UID: \"f94c0747-2c1f-40fd-941d-e714db6709d5\") " Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.032860 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-utilities" (OuterVolumeSpecName: "utilities") pod "f94c0747-2c1f-40fd-941d-e714db6709d5" (UID: "f94c0747-2c1f-40fd-941d-e714db6709d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.040070 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f94c0747-2c1f-40fd-941d-e714db6709d5-kube-api-access-dcz5x" (OuterVolumeSpecName: "kube-api-access-dcz5x") pod "f94c0747-2c1f-40fd-941d-e714db6709d5" (UID: "f94c0747-2c1f-40fd-941d-e714db6709d5"). InnerVolumeSpecName "kube-api-access-dcz5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.111837 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f94c0747-2c1f-40fd-941d-e714db6709d5" (UID: "f94c0747-2c1f-40fd-941d-e714db6709d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.136046 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcz5x\" (UniqueName: \"kubernetes.io/projected/f94c0747-2c1f-40fd-941d-e714db6709d5-kube-api-access-dcz5x\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.136088 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.136098 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f94c0747-2c1f-40fd-941d-e714db6709d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.741064 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpw49" event={"ID":"f94c0747-2c1f-40fd-941d-e714db6709d5","Type":"ContainerDied","Data":"501784ac6206f655d2f7c746783da8c0b071560716624c8b7c37beb95d4f3b5d"} Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.741125 4863 scope.go:117] "RemoveContainer" containerID="39207ae83d81f5953d5fb01b88a78f5896aae1168c666a94aec2cc5205a26028" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.741291 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qpw49" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.744853 4863 generic.go:334] "Generic (PLEG): container finished" podID="61304558-f044-4fd0-82f7-d2533e210fc2" containerID="3476bc0865709aecf17715ebbdee4cac9b17e9d89eabcd1936baf13e25684716" exitCode=0 Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.745560 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" event={"ID":"61304558-f044-4fd0-82f7-d2533e210fc2","Type":"ContainerDied","Data":"3476bc0865709aecf17715ebbdee4cac9b17e9d89eabcd1936baf13e25684716"} Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.782688 4863 scope.go:117] "RemoveContainer" containerID="c26886ad130e068fa17bf5bce03ebce2bb03a2b7051ebc88cad5a5ac59126e58" Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.816618 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qpw49"] Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.828602 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qpw49"] Dec 05 08:39:47 crc kubenswrapper[4863]: I1205 08:39:47.837814 4863 scope.go:117] "RemoveContainer" containerID="5adc91e3de2512051eda7786a20d97b7bd28bc5a3754f3282d3cc5889a8f2c94" Dec 05 08:39:48 crc kubenswrapper[4863]: I1205 08:39:48.620593 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" path="/var/lib/kubelet/pods/f94c0747-2c1f-40fd-941d-e714db6709d5/volumes" Dec 05 08:39:48 crc kubenswrapper[4863]: I1205 08:39:48.944597 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.329951 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.497788 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-inventory\") pod \"61304558-f044-4fd0-82f7-d2533e210fc2\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.497884 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ssh-key\") pod \"61304558-f044-4fd0-82f7-d2533e210fc2\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.498049 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8nq7\" (UniqueName: \"kubernetes.io/projected/61304558-f044-4fd0-82f7-d2533e210fc2-kube-api-access-c8nq7\") pod \"61304558-f044-4fd0-82f7-d2533e210fc2\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.498124 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ceph\") pod \"61304558-f044-4fd0-82f7-d2533e210fc2\" (UID: \"61304558-f044-4fd0-82f7-d2533e210fc2\") " Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.504552 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ceph" (OuterVolumeSpecName: "ceph") pod "61304558-f044-4fd0-82f7-d2533e210fc2" (UID: "61304558-f044-4fd0-82f7-d2533e210fc2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.513639 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61304558-f044-4fd0-82f7-d2533e210fc2-kube-api-access-c8nq7" (OuterVolumeSpecName: "kube-api-access-c8nq7") pod "61304558-f044-4fd0-82f7-d2533e210fc2" (UID: "61304558-f044-4fd0-82f7-d2533e210fc2"). InnerVolumeSpecName "kube-api-access-c8nq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.530011 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61304558-f044-4fd0-82f7-d2533e210fc2" (UID: "61304558-f044-4fd0-82f7-d2533e210fc2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.530535 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-inventory" (OuterVolumeSpecName: "inventory") pod "61304558-f044-4fd0-82f7-d2533e210fc2" (UID: "61304558-f044-4fd0-82f7-d2533e210fc2"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.600364 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8nq7\" (UniqueName: \"kubernetes.io/projected/61304558-f044-4fd0-82f7-d2533e210fc2-kube-api-access-c8nq7\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.600412 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.600430 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.600442 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61304558-f044-4fd0-82f7-d2533e210fc2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.767108 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" event={"ID":"61304558-f044-4fd0-82f7-d2533e210fc2","Type":"ContainerDied","Data":"38c740fc353acf6f4a6655fca2d7b627752f1d46ba6d2d6b8520b9f6056dd6aa"} Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.767626 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38c740fc353acf6f4a6655fca2d7b627752f1d46ba6d2d6b8520b9f6056dd6aa" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.767498 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4k99f" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.866586 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-hqvvh"] Dec 05 08:39:49 crc kubenswrapper[4863]: E1205 08:39:49.867125 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="extract-content" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.867148 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="extract-content" Dec 05 08:39:49 crc kubenswrapper[4863]: E1205 08:39:49.867186 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="registry-server" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.867194 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="registry-server" Dec 05 08:39:49 crc kubenswrapper[4863]: E1205 08:39:49.867205 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="extract-utilities" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.867213 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="extract-utilities" Dec 05 08:39:49 crc kubenswrapper[4863]: E1205 08:39:49.867239 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61304558-f044-4fd0-82f7-d2533e210fc2" containerName="download-cache-openstack-openstack-cell1" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.867246 4863 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="61304558-f044-4fd0-82f7-d2533e210fc2" containerName="download-cache-openstack-openstack-cell1" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.867646 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="61304558-f044-4fd0-82f7-d2533e210fc2" containerName="download-cache-openstack-openstack-cell1" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.867678 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f94c0747-2c1f-40fd-941d-e714db6709d5" containerName="registry-server" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.868640 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.871644 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.873018 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.873620 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.878200 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-hqvvh"] Dec 05 08:39:49 crc kubenswrapper[4863]: I1205 08:39:49.895327 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.075385 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ceph\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.075609 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-inventory\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.075704 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq2lm\" (UniqueName: \"kubernetes.io/projected/aeaedb49-cd07-46e7-934f-f1b05a66d43e-kube-api-access-rq2lm\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.075861 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ssh-key\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.177570 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-inventory\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.177636 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq2lm\" (UniqueName: \"kubernetes.io/projected/aeaedb49-cd07-46e7-934f-f1b05a66d43e-kube-api-access-rq2lm\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.177719 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ssh-key\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.177810 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ceph\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.183410 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ceph\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.183609 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-inventory\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.190139 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ssh-key\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.196332 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq2lm\" (UniqueName: \"kubernetes.io/projected/aeaedb49-cd07-46e7-934f-f1b05a66d43e-kube-api-access-rq2lm\") pod \"configure-network-openstack-openstack-cell1-hqvvh\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.204962 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.586903 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdcbp"] Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.587518 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kdcbp" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="registry-server" containerID="cri-o://d426435c74d16f9bc3917612435e0eb0b0b50ced5f7a03f087c75e4b8b856d0a" gracePeriod=2 Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.770241 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-hqvvh"] Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.785369 4863 generic.go:334] "Generic (PLEG): container finished" podID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerID="d426435c74d16f9bc3917612435e0eb0b0b50ced5f7a03f087c75e4b8b856d0a" exitCode=0 Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.785431 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdcbp" event={"ID":"64ee22d2-d2b7-482d-b396-5bb79822b3a5","Type":"ContainerDied","Data":"d426435c74d16f9bc3917612435e0eb0b0b50ced5f7a03f087c75e4b8b856d0a"} Dec 05 08:39:50 crc kubenswrapper[4863]: W1205 08:39:50.791103 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaeaedb49_cd07_46e7_934f_f1b05a66d43e.slice/crio-7b0dfb79f51e462014fef70a8fc2ab97cdb199faa1d2b7687442342a0c98487d WatchSource:0}: Error finding container 7b0dfb79f51e462014fef70a8fc2ab97cdb199faa1d2b7687442342a0c98487d: Status 404 returned error can't find the container with id 7b0dfb79f51e462014fef70a8fc2ab97cdb199faa1d2b7687442342a0c98487d Dec 05 08:39:50 crc kubenswrapper[4863]: I1205 08:39:50.793772 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.056964 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.197218 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-catalog-content\") pod \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.197502 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7qg5\" (UniqueName: \"kubernetes.io/projected/64ee22d2-d2b7-482d-b396-5bb79822b3a5-kube-api-access-q7qg5\") pod \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.197656 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-utilities\") pod \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\" (UID: \"64ee22d2-d2b7-482d-b396-5bb79822b3a5\") " Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.198173 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-utilities" (OuterVolumeSpecName: "utilities") pod "64ee22d2-d2b7-482d-b396-5bb79822b3a5" (UID: "64ee22d2-d2b7-482d-b396-5bb79822b3a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.203444 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64ee22d2-d2b7-482d-b396-5bb79822b3a5-kube-api-access-q7qg5" (OuterVolumeSpecName: "kube-api-access-q7qg5") pod "64ee22d2-d2b7-482d-b396-5bb79822b3a5" (UID: "64ee22d2-d2b7-482d-b396-5bb79822b3a5"). InnerVolumeSpecName "kube-api-access-q7qg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.220041 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64ee22d2-d2b7-482d-b396-5bb79822b3a5" (UID: "64ee22d2-d2b7-482d-b396-5bb79822b3a5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.299495 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7qg5\" (UniqueName: \"kubernetes.io/projected/64ee22d2-d2b7-482d-b396-5bb79822b3a5-kube-api-access-q7qg5\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.299850 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.299865 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64ee22d2-d2b7-482d-b396-5bb79822b3a5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.796098 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" event={"ID":"aeaedb49-cd07-46e7-934f-f1b05a66d43e","Type":"ContainerStarted","Data":"eb0d0d4e5bac07a372683b0e3898b424b3e70bf7fc062d6298682ba4b9c823d1"} Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.796145 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" event={"ID":"aeaedb49-cd07-46e7-934f-f1b05a66d43e","Type":"ContainerStarted","Data":"7b0dfb79f51e462014fef70a8fc2ab97cdb199faa1d2b7687442342a0c98487d"} Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.798921 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdcbp" event={"ID":"64ee22d2-d2b7-482d-b396-5bb79822b3a5","Type":"ContainerDied","Data":"eb5aaf0c520a7cf1e36b713d63aea44584a7540156190e9d8f6370903c3f94eb"} Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.798972 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdcbp" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.798992 4863 scope.go:117] "RemoveContainer" containerID="d426435c74d16f9bc3917612435e0eb0b0b50ced5f7a03f087c75e4b8b856d0a" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.844350 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" podStartSLOduration=2.433542354 podStartE2EDuration="2.844084665s" podCreationTimestamp="2025-12-05 08:39:49 +0000 UTC" firstStartedPulling="2025-12-05 08:39:50.793546897 +0000 UTC m=+6818.519543937" lastFinishedPulling="2025-12-05 08:39:51.204089208 +0000 UTC m=+6818.930086248" observedRunningTime="2025-12-05 08:39:51.830667258 +0000 UTC m=+6819.556664298" watchObservedRunningTime="2025-12-05 08:39:51.844084665 +0000 UTC m=+6819.570081715" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.855681 4863 scope.go:117] "RemoveContainer" containerID="ca76bffcfd233af33df5d8e129afbb03a1acf1d6cde03e66b76f92eeacf24192" Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.859912 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdcbp"] Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.873273 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdcbp"] Dec 05 08:39:51 crc kubenswrapper[4863]: I1205 08:39:51.877465 4863 scope.go:117] "RemoveContainer" containerID="1b220fc35f757d53a99f8287d8255a341dcd0bccf8bf9f470620057e05889c8f" Dec 05 08:39:52 crc kubenswrapper[4863]: I1205 08:39:52.613761 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" path="/var/lib/kubelet/pods/64ee22d2-d2b7-482d-b396-5bb79822b3a5/volumes" Dec 05 08:40:38 crc kubenswrapper[4863]: I1205 08:40:38.464603 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:40:38 crc kubenswrapper[4863]: I1205 08:40:38.465226 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:41:08 crc kubenswrapper[4863]: I1205 08:41:08.464042 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:41:08 crc kubenswrapper[4863]: I1205 08:41:08.464634 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:41:09 crc kubenswrapper[4863]: I1205 08:41:09.570148 4863 generic.go:334] "Generic (PLEG): container finished" podID="aeaedb49-cd07-46e7-934f-f1b05a66d43e" 
containerID="eb0d0d4e5bac07a372683b0e3898b424b3e70bf7fc062d6298682ba4b9c823d1" exitCode=0 Dec 05 08:41:09 crc kubenswrapper[4863]: I1205 08:41:09.570276 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" event={"ID":"aeaedb49-cd07-46e7-934f-f1b05a66d43e","Type":"ContainerDied","Data":"eb0d0d4e5bac07a372683b0e3898b424b3e70bf7fc062d6298682ba4b9c823d1"} Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.017695 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.134384 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ssh-key\") pod \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.134451 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-inventory\") pod \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.134512 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ceph\") pod \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.134613 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq2lm\" (UniqueName: \"kubernetes.io/projected/aeaedb49-cd07-46e7-934f-f1b05a66d43e-kube-api-access-rq2lm\") pod \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\" (UID: \"aeaedb49-cd07-46e7-934f-f1b05a66d43e\") " Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.141275 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ceph" (OuterVolumeSpecName: "ceph") pod "aeaedb49-cd07-46e7-934f-f1b05a66d43e" (UID: "aeaedb49-cd07-46e7-934f-f1b05a66d43e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.148830 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeaedb49-cd07-46e7-934f-f1b05a66d43e-kube-api-access-rq2lm" (OuterVolumeSpecName: "kube-api-access-rq2lm") pod "aeaedb49-cd07-46e7-934f-f1b05a66d43e" (UID: "aeaedb49-cd07-46e7-934f-f1b05a66d43e"). InnerVolumeSpecName "kube-api-access-rq2lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.168148 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aeaedb49-cd07-46e7-934f-f1b05a66d43e" (UID: "aeaedb49-cd07-46e7-934f-f1b05a66d43e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.168158 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-inventory" (OuterVolumeSpecName: "inventory") pod "aeaedb49-cd07-46e7-934f-f1b05a66d43e" (UID: "aeaedb49-cd07-46e7-934f-f1b05a66d43e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.237500 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.237541 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.237555 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aeaedb49-cd07-46e7-934f-f1b05a66d43e-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.237569 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq2lm\" (UniqueName: \"kubernetes.io/projected/aeaedb49-cd07-46e7-934f-f1b05a66d43e-kube-api-access-rq2lm\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.589572 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" event={"ID":"aeaedb49-cd07-46e7-934f-f1b05a66d43e","Type":"ContainerDied","Data":"7b0dfb79f51e462014fef70a8fc2ab97cdb199faa1d2b7687442342a0c98487d"} Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.589615 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b0dfb79f51e462014fef70a8fc2ab97cdb199faa1d2b7687442342a0c98487d" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.591309 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hqvvh" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.683998 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-74xzf"] Dec 05 08:41:11 crc kubenswrapper[4863]: E1205 08:41:11.684515 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="extract-utilities" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.684536 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="extract-utilities" Dec 05 08:41:11 crc kubenswrapper[4863]: E1205 08:41:11.684556 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="extract-content" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.684562 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="extract-content" Dec 05 08:41:11 crc kubenswrapper[4863]: E1205 08:41:11.684572 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeaedb49-cd07-46e7-934f-f1b05a66d43e" containerName="configure-network-openstack-openstack-cell1" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.684579 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeaedb49-cd07-46e7-934f-f1b05a66d43e" containerName="configure-network-openstack-openstack-cell1" Dec 05 08:41:11 crc kubenswrapper[4863]: E1205 08:41:11.684593 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="registry-server" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.684599 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="registry-server" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.684812 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="64ee22d2-d2b7-482d-b396-5bb79822b3a5" containerName="registry-server" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.684832 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeaedb49-cd07-46e7-934f-f1b05a66d43e" containerName="configure-network-openstack-openstack-cell1" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.685580 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.688108 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.688572 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.689274 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.689878 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.694336 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-74xzf"] Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.850548 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-inventory\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.850605 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpxbl\" (UniqueName: \"kubernetes.io/projected/8dddadd9-2948-426d-80e5-a47c45569ec6-kube-api-access-fpxbl\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.850648 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ssh-key\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.850716 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ceph\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.953213 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ssh-key\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.953423 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ceph\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 
08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.953692 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-inventory\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.953738 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpxbl\" (UniqueName: \"kubernetes.io/projected/8dddadd9-2948-426d-80e5-a47c45569ec6-kube-api-access-fpxbl\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.958599 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-inventory\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.958623 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ssh-key\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.967282 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ceph\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:11 crc kubenswrapper[4863]: I1205 08:41:11.982935 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpxbl\" (UniqueName: \"kubernetes.io/projected/8dddadd9-2948-426d-80e5-a47c45569ec6-kube-api-access-fpxbl\") pod \"validate-network-openstack-openstack-cell1-74xzf\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:12 crc kubenswrapper[4863]: I1205 08:41:12.009657 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:12 crc kubenswrapper[4863]: I1205 08:41:12.359161 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-74xzf"] Dec 05 08:41:12 crc kubenswrapper[4863]: I1205 08:41:12.599526 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" event={"ID":"8dddadd9-2948-426d-80e5-a47c45569ec6","Type":"ContainerStarted","Data":"a81da72a56c590f51e03419a54446f483961a2ce9b6c56d22f19d7372e9ce357"} Dec 05 08:41:12 crc kubenswrapper[4863]: I1205 08:41:12.821739 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:41:13 crc kubenswrapper[4863]: I1205 08:41:13.633176 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" event={"ID":"8dddadd9-2948-426d-80e5-a47c45569ec6","Type":"ContainerStarted","Data":"db18a61252acbc024ae73e6a97c3789ac08f6f63fa28aea4acf1a53b5548e43a"} Dec 05 08:41:13 crc kubenswrapper[4863]: I1205 08:41:13.660685 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" podStartSLOduration=2.198995403 podStartE2EDuration="2.660667415s" podCreationTimestamp="2025-12-05 08:41:11 +0000 UTC" firstStartedPulling="2025-12-05 08:41:12.357501907 +0000 UTC m=+6900.083498947" lastFinishedPulling="2025-12-05 08:41:12.819173929 +0000 UTC m=+6900.545170959" observedRunningTime="2025-12-05 08:41:13.655778726 +0000 UTC m=+6901.381775756" watchObservedRunningTime="2025-12-05 08:41:13.660667415 +0000 UTC m=+6901.386664455" Dec 05 08:41:18 crc kubenswrapper[4863]: I1205 08:41:18.689188 4863 generic.go:334] "Generic (PLEG): container finished" podID="8dddadd9-2948-426d-80e5-a47c45569ec6" containerID="db18a61252acbc024ae73e6a97c3789ac08f6f63fa28aea4acf1a53b5548e43a" exitCode=0 Dec 05 08:41:18 crc kubenswrapper[4863]: I1205 08:41:18.689294 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" event={"ID":"8dddadd9-2948-426d-80e5-a47c45569ec6","Type":"ContainerDied","Data":"db18a61252acbc024ae73e6a97c3789ac08f6f63fa28aea4acf1a53b5548e43a"} Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.126627 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.230052 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ssh-key\") pod \"8dddadd9-2948-426d-80e5-a47c45569ec6\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.230231 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-inventory\") pod \"8dddadd9-2948-426d-80e5-a47c45569ec6\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.230320 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpxbl\" (UniqueName: \"kubernetes.io/projected/8dddadd9-2948-426d-80e5-a47c45569ec6-kube-api-access-fpxbl\") pod \"8dddadd9-2948-426d-80e5-a47c45569ec6\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.230395 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ceph\") pod \"8dddadd9-2948-426d-80e5-a47c45569ec6\" (UID: \"8dddadd9-2948-426d-80e5-a47c45569ec6\") " Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.236780 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dddadd9-2948-426d-80e5-a47c45569ec6-kube-api-access-fpxbl" (OuterVolumeSpecName: "kube-api-access-fpxbl") pod "8dddadd9-2948-426d-80e5-a47c45569ec6" (UID: "8dddadd9-2948-426d-80e5-a47c45569ec6"). InnerVolumeSpecName "kube-api-access-fpxbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.237235 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ceph" (OuterVolumeSpecName: "ceph") pod "8dddadd9-2948-426d-80e5-a47c45569ec6" (UID: "8dddadd9-2948-426d-80e5-a47c45569ec6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.262019 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-inventory" (OuterVolumeSpecName: "inventory") pod "8dddadd9-2948-426d-80e5-a47c45569ec6" (UID: "8dddadd9-2948-426d-80e5-a47c45569ec6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.263835 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8dddadd9-2948-426d-80e5-a47c45569ec6" (UID: "8dddadd9-2948-426d-80e5-a47c45569ec6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.333343 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.333400 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpxbl\" (UniqueName: \"kubernetes.io/projected/8dddadd9-2948-426d-80e5-a47c45569ec6-kube-api-access-fpxbl\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.333413 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.333425 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8dddadd9-2948-426d-80e5-a47c45569ec6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.713433 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" event={"ID":"8dddadd9-2948-426d-80e5-a47c45569ec6","Type":"ContainerDied","Data":"a81da72a56c590f51e03419a54446f483961a2ce9b6c56d22f19d7372e9ce357"} Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.713704 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a81da72a56c590f51e03419a54446f483961a2ce9b6c56d22f19d7372e9ce357" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.713833 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-74xzf" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.789103 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-q4p64"] Dec 05 08:41:20 crc kubenswrapper[4863]: E1205 08:41:20.789846 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dddadd9-2948-426d-80e5-a47c45569ec6" containerName="validate-network-openstack-openstack-cell1" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.789962 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dddadd9-2948-426d-80e5-a47c45569ec6" containerName="validate-network-openstack-openstack-cell1" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.790376 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dddadd9-2948-426d-80e5-a47c45569ec6" containerName="validate-network-openstack-openstack-cell1" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.791432 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.803395 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-q4p64"] Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.808061 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.808611 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.809026 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.809863 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.843760 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ceph\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.843814 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-inventory\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.843913 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpr9w\" (UniqueName: \"kubernetes.io/projected/bf7632e1-1460-44a9-95ab-45f0b0b94728-kube-api-access-hpr9w\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.844019 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ssh-key\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.945937 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ceph\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.946309 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-inventory\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.946364 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-hpr9w\" (UniqueName: \"kubernetes.io/projected/bf7632e1-1460-44a9-95ab-45f0b0b94728-kube-api-access-hpr9w\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.946488 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ssh-key\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.952518 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ceph\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.952575 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-inventory\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.954620 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ssh-key\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:20 crc kubenswrapper[4863]: I1205 08:41:20.972386 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpr9w\" (UniqueName: \"kubernetes.io/projected/bf7632e1-1460-44a9-95ab-45f0b0b94728-kube-api-access-hpr9w\") pod \"install-os-openstack-openstack-cell1-q4p64\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:21 crc kubenswrapper[4863]: I1205 08:41:21.122899 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:41:21 crc kubenswrapper[4863]: I1205 08:41:21.688335 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-q4p64"] Dec 05 08:41:21 crc kubenswrapper[4863]: I1205 08:41:21.725068 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q4p64" event={"ID":"bf7632e1-1460-44a9-95ab-45f0b0b94728","Type":"ContainerStarted","Data":"51b61dae851e037db8fdb960060986a1ef20f98e9760cd447cbdba2cb83cd88a"} Dec 05 08:41:22 crc kubenswrapper[4863]: I1205 08:41:22.735002 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q4p64" event={"ID":"bf7632e1-1460-44a9-95ab-45f0b0b94728","Type":"ContainerStarted","Data":"5772fa5d497a2c8f3e9503e26c8cae8fdbf5c19fa81c7ee275450c7664d32df2"} Dec 05 08:41:22 crc kubenswrapper[4863]: I1205 08:41:22.765856 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-q4p64" podStartSLOduration=2.368862629 podStartE2EDuration="2.765835438s" podCreationTimestamp="2025-12-05 08:41:20 +0000 UTC" firstStartedPulling="2025-12-05 08:41:21.68807073 +0000 UTC m=+6909.414067760" lastFinishedPulling="2025-12-05 08:41:22.085043529 +0000 UTC m=+6909.811040569" observedRunningTime="2025-12-05 08:41:22.75275063 +0000 UTC m=+6910.478747670" watchObservedRunningTime="2025-12-05 08:41:22.765835438 +0000 UTC m=+6910.491832478" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.648513 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n874p"] Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.652676 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.662555 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n874p"] Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.830561 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-utilities\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.830811 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sltw6\" (UniqueName: \"kubernetes.io/projected/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-kube-api-access-sltw6\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.830925 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-catalog-content\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.933377 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-utilities\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.933621 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sltw6\" (UniqueName: \"kubernetes.io/projected/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-kube-api-access-sltw6\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.933685 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-catalog-content\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.933906 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-utilities\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.934161 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-catalog-content\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.954374 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-sltw6\" (UniqueName: \"kubernetes.io/projected/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-kube-api-access-sltw6\") pod \"redhat-operators-n874p\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:27 crc kubenswrapper[4863]: I1205 08:41:27.983789 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:28 crc kubenswrapper[4863]: I1205 08:41:28.557026 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n874p"] Dec 05 08:41:28 crc kubenswrapper[4863]: I1205 08:41:28.808617 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerStarted","Data":"6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa"} Dec 05 08:41:28 crc kubenswrapper[4863]: I1205 08:41:28.808673 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerStarted","Data":"c11b2c4f3a029064fe617f90b3c9fb06071be2b1c6d6bb97eb511784d5adf467"} Dec 05 08:41:29 crc kubenswrapper[4863]: I1205 08:41:29.821200 4863 generic.go:334] "Generic (PLEG): container finished" podID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerID="6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa" exitCode=0 Dec 05 08:41:29 crc kubenswrapper[4863]: I1205 08:41:29.821331 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerDied","Data":"6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa"} Dec 05 08:41:30 crc kubenswrapper[4863]: I1205 08:41:30.833216 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerStarted","Data":"a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec"} Dec 05 08:41:34 crc kubenswrapper[4863]: I1205 08:41:34.879455 4863 generic.go:334] "Generic (PLEG): container finished" podID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerID="a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec" exitCode=0 Dec 05 08:41:34 crc kubenswrapper[4863]: I1205 08:41:34.879493 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerDied","Data":"a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec"} Dec 05 08:41:35 crc kubenswrapper[4863]: I1205 08:41:35.892165 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerStarted","Data":"fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d"} Dec 05 08:41:35 crc kubenswrapper[4863]: I1205 08:41:35.913722 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n874p" podStartSLOduration=3.186025193 podStartE2EDuration="8.913694704s" podCreationTimestamp="2025-12-05 08:41:27 +0000 UTC" firstStartedPulling="2025-12-05 08:41:29.825984631 +0000 UTC m=+6917.551981671" lastFinishedPulling="2025-12-05 08:41:35.553654122 +0000 UTC m=+6923.279651182" observedRunningTime="2025-12-05 
08:41:35.911418468 +0000 UTC m=+6923.637415518" watchObservedRunningTime="2025-12-05 08:41:35.913694704 +0000 UTC m=+6923.639691744" Dec 05 08:41:37 crc kubenswrapper[4863]: I1205 08:41:37.983981 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:37 crc kubenswrapper[4863]: I1205 08:41:37.984591 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.464050 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.464115 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.464161 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.465025 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03454eac9d11da1cd83a42f4459000476696e0445a5619adcb508c0c6e1b7ba9"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.465087 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://03454eac9d11da1cd83a42f4459000476696e0445a5619adcb508c0c6e1b7ba9" gracePeriod=600 Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.921768 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="03454eac9d11da1cd83a42f4459000476696e0445a5619adcb508c0c6e1b7ba9" exitCode=0 Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.921830 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"03454eac9d11da1cd83a42f4459000476696e0445a5619adcb508c0c6e1b7ba9"} Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.922124 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296"} Dec 05 08:41:38 crc kubenswrapper[4863]: I1205 08:41:38.922146 4863 scope.go:117] "RemoveContainer" containerID="933b49fe51c651a9d55b89843d6798c7536f0be670b8286dcb41c1d9d4839ef6" Dec 05 08:41:39 crc kubenswrapper[4863]: I1205 08:41:39.030739 4863 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-marketplace/redhat-operators-n874p" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="registry-server" probeResult="failure" output=< Dec 05 08:41:39 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 08:41:39 crc kubenswrapper[4863]: > Dec 05 08:41:48 crc kubenswrapper[4863]: I1205 08:41:48.051499 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:48 crc kubenswrapper[4863]: I1205 08:41:48.123775 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:48 crc kubenswrapper[4863]: I1205 08:41:48.293794 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n874p"] Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.026802 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n874p" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="registry-server" containerID="cri-o://fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d" gracePeriod=2 Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.528221 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.656429 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-utilities\") pod \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.656503 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sltw6\" (UniqueName: \"kubernetes.io/projected/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-kube-api-access-sltw6\") pod \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.656535 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-catalog-content\") pod \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\" (UID: \"e3eff6f8-2ddb-46fb-b985-278fd17aaa09\") " Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.657574 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-utilities" (OuterVolumeSpecName: "utilities") pod "e3eff6f8-2ddb-46fb-b985-278fd17aaa09" (UID: "e3eff6f8-2ddb-46fb-b985-278fd17aaa09"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.662690 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-kube-api-access-sltw6" (OuterVolumeSpecName: "kube-api-access-sltw6") pod "e3eff6f8-2ddb-46fb-b985-278fd17aaa09" (UID: "e3eff6f8-2ddb-46fb-b985-278fd17aaa09"). InnerVolumeSpecName "kube-api-access-sltw6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.759128 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.759162 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sltw6\" (UniqueName: \"kubernetes.io/projected/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-kube-api-access-sltw6\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.766290 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3eff6f8-2ddb-46fb-b985-278fd17aaa09" (UID: "e3eff6f8-2ddb-46fb-b985-278fd17aaa09"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:41:50 crc kubenswrapper[4863]: I1205 08:41:50.861342 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3eff6f8-2ddb-46fb-b985-278fd17aaa09-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.038514 4863 generic.go:334] "Generic (PLEG): container finished" podID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerID="fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d" exitCode=0 Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.038563 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerDied","Data":"fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d"} Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.038599 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n874p" event={"ID":"e3eff6f8-2ddb-46fb-b985-278fd17aaa09","Type":"ContainerDied","Data":"c11b2c4f3a029064fe617f90b3c9fb06071be2b1c6d6bb97eb511784d5adf467"} Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.038619 4863 scope.go:117] "RemoveContainer" containerID="fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.038612 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n874p" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.063132 4863 scope.go:117] "RemoveContainer" containerID="a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.095850 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n874p"] Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.106064 4863 scope.go:117] "RemoveContainer" containerID="6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.109781 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n874p"] Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.145634 4863 scope.go:117] "RemoveContainer" containerID="fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d" Dec 05 08:41:51 crc kubenswrapper[4863]: E1205 08:41:51.146143 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d\": container with ID starting with fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d not found: ID does not exist" containerID="fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.146281 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d"} err="failed to get container status \"fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d\": rpc error: code = NotFound desc = could not find container \"fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d\": container with ID starting with fe0049d94ed4822cd952862ad800ce2cc3421afb9a7178a8c301ba83682a0b0d not found: ID does not exist" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.146450 4863 scope.go:117] "RemoveContainer" containerID="a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec" Dec 05 08:41:51 crc kubenswrapper[4863]: E1205 08:41:51.146919 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec\": container with ID starting with a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec not found: ID does not exist" containerID="a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.147046 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec"} err="failed to get container status \"a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec\": rpc error: code = NotFound desc = could not find container \"a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec\": container with ID starting with a61b3aa4be2f1cee535a791e04274b9c12524509b2cf18b8aa4ba00e6b5c21ec not found: ID does not exist" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.147150 4863 scope.go:117] "RemoveContainer" containerID="6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa" Dec 05 08:41:51 crc kubenswrapper[4863]: E1205 08:41:51.147514 4863 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa\": container with ID starting with 6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa not found: ID does not exist" containerID="6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa" Dec 05 08:41:51 crc kubenswrapper[4863]: I1205 08:41:51.147616 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa"} err="failed to get container status \"6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa\": rpc error: code = NotFound desc = could not find container \"6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa\": container with ID starting with 6589cd1d7784995fbb7276e5e31bd26dcddb2296524242c2823189b3bff662fa not found: ID does not exist" Dec 05 08:41:52 crc kubenswrapper[4863]: I1205 08:41:52.615655 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" path="/var/lib/kubelet/pods/e3eff6f8-2ddb-46fb-b985-278fd17aaa09/volumes" Dec 05 08:42:10 crc kubenswrapper[4863]: I1205 08:42:10.279782 4863 generic.go:334] "Generic (PLEG): container finished" podID="bf7632e1-1460-44a9-95ab-45f0b0b94728" containerID="5772fa5d497a2c8f3e9503e26c8cae8fdbf5c19fa81c7ee275450c7664d32df2" exitCode=0 Dec 05 08:42:10 crc kubenswrapper[4863]: I1205 08:42:10.279880 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q4p64" event={"ID":"bf7632e1-1460-44a9-95ab-45f0b0b94728","Type":"ContainerDied","Data":"5772fa5d497a2c8f3e9503e26c8cae8fdbf5c19fa81c7ee275450c7664d32df2"} Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.826393 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.958794 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ssh-key\") pod \"bf7632e1-1460-44a9-95ab-45f0b0b94728\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.958875 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpr9w\" (UniqueName: \"kubernetes.io/projected/bf7632e1-1460-44a9-95ab-45f0b0b94728-kube-api-access-hpr9w\") pod \"bf7632e1-1460-44a9-95ab-45f0b0b94728\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.958904 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ceph\") pod \"bf7632e1-1460-44a9-95ab-45f0b0b94728\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.959064 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-inventory\") pod \"bf7632e1-1460-44a9-95ab-45f0b0b94728\" (UID: \"bf7632e1-1460-44a9-95ab-45f0b0b94728\") " Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.964573 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf7632e1-1460-44a9-95ab-45f0b0b94728-kube-api-access-hpr9w" (OuterVolumeSpecName: "kube-api-access-hpr9w") pod "bf7632e1-1460-44a9-95ab-45f0b0b94728" (UID: "bf7632e1-1460-44a9-95ab-45f0b0b94728"). InnerVolumeSpecName "kube-api-access-hpr9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.964646 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ceph" (OuterVolumeSpecName: "ceph") pod "bf7632e1-1460-44a9-95ab-45f0b0b94728" (UID: "bf7632e1-1460-44a9-95ab-45f0b0b94728"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.989080 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bf7632e1-1460-44a9-95ab-45f0b0b94728" (UID: "bf7632e1-1460-44a9-95ab-45f0b0b94728"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4863]: I1205 08:42:11.992169 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-inventory" (OuterVolumeSpecName: "inventory") pod "bf7632e1-1460-44a9-95ab-45f0b0b94728" (UID: "bf7632e1-1460-44a9-95ab-45f0b0b94728"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.062815 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.062856 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.062872 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpr9w\" (UniqueName: \"kubernetes.io/projected/bf7632e1-1460-44a9-95ab-45f0b0b94728-kube-api-access-hpr9w\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.062888 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf7632e1-1460-44a9-95ab-45f0b0b94728-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.307576 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-q4p64" event={"ID":"bf7632e1-1460-44a9-95ab-45f0b0b94728","Type":"ContainerDied","Data":"51b61dae851e037db8fdb960060986a1ef20f98e9760cd447cbdba2cb83cd88a"} Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.307633 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51b61dae851e037db8fdb960060986a1ef20f98e9760cd447cbdba2cb83cd88a" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.307744 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-q4p64" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.437311 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-m9fbt"] Dec 05 08:42:12 crc kubenswrapper[4863]: E1205 08:42:12.437989 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="extract-content" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.438073 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="extract-content" Dec 05 08:42:12 crc kubenswrapper[4863]: E1205 08:42:12.438164 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="extract-utilities" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.438251 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="extract-utilities" Dec 05 08:42:12 crc kubenswrapper[4863]: E1205 08:42:12.438332 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf7632e1-1460-44a9-95ab-45f0b0b94728" containerName="install-os-openstack-openstack-cell1" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.438394 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf7632e1-1460-44a9-95ab-45f0b0b94728" containerName="install-os-openstack-openstack-cell1" Dec 05 08:42:12 crc kubenswrapper[4863]: E1205 08:42:12.438460 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="registry-server" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.438542 4863 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="registry-server" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.438826 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3eff6f8-2ddb-46fb-b985-278fd17aaa09" containerName="registry-server" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.438924 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf7632e1-1460-44a9-95ab-45f0b0b94728" containerName="install-os-openstack-openstack-cell1" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.439755 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.442663 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.445332 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.445678 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.445884 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.458404 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-m9fbt"] Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.573526 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ssh-key\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.573902 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tvlr\" (UniqueName: \"kubernetes.io/projected/956afa12-575f-40e0-af41-fbb3accd6ad5-kube-api-access-4tvlr\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.573939 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ceph\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.573988 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-inventory\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.675545 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ssh-key\") 
pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.675655 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tvlr\" (UniqueName: \"kubernetes.io/projected/956afa12-575f-40e0-af41-fbb3accd6ad5-kube-api-access-4tvlr\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.675685 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ceph\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.675720 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-inventory\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.677881 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.678416 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.681213 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ceph\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.689537 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ssh-key\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.689751 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-inventory\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.693236 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tvlr\" (UniqueName: \"kubernetes.io/projected/956afa12-575f-40e0-af41-fbb3accd6ad5-kube-api-access-4tvlr\") pod \"configure-os-openstack-openstack-cell1-m9fbt\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.778385 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 
08:42:12 crc kubenswrapper[4863]: I1205 08:42:12.787495 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:42:13 crc kubenswrapper[4863]: I1205 08:42:13.292182 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-m9fbt"] Dec 05 08:42:13 crc kubenswrapper[4863]: I1205 08:42:13.320968 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" event={"ID":"956afa12-575f-40e0-af41-fbb3accd6ad5","Type":"ContainerStarted","Data":"b66aead927dbde2cc6e6cac1c2f7ecbac800ea719c0a29bae0a64d29e4355177"} Dec 05 08:42:13 crc kubenswrapper[4863]: I1205 08:42:13.753678 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:42:14 crc kubenswrapper[4863]: I1205 08:42:14.329904 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" event={"ID":"956afa12-575f-40e0-af41-fbb3accd6ad5","Type":"ContainerStarted","Data":"41feb19705f24f993bcaa9d6e5c274ddc64d44ad9257279620a34f7b3906d392"} Dec 05 08:42:14 crc kubenswrapper[4863]: I1205 08:42:14.354919 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" podStartSLOduration=1.899456062 podStartE2EDuration="2.354900283s" podCreationTimestamp="2025-12-05 08:42:12 +0000 UTC" firstStartedPulling="2025-12-05 08:42:13.293697017 +0000 UTC m=+6961.019694057" lastFinishedPulling="2025-12-05 08:42:13.749141188 +0000 UTC m=+6961.475138278" observedRunningTime="2025-12-05 08:42:14.347258678 +0000 UTC m=+6962.073255748" watchObservedRunningTime="2025-12-05 08:42:14.354900283 +0000 UTC m=+6962.080897323" Dec 05 08:42:58 crc kubenswrapper[4863]: I1205 08:42:58.767612 4863 generic.go:334] "Generic (PLEG): container finished" podID="956afa12-575f-40e0-af41-fbb3accd6ad5" containerID="41feb19705f24f993bcaa9d6e5c274ddc64d44ad9257279620a34f7b3906d392" exitCode=0 Dec 05 08:42:58 crc kubenswrapper[4863]: I1205 08:42:58.767689 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" event={"ID":"956afa12-575f-40e0-af41-fbb3accd6ad5","Type":"ContainerDied","Data":"41feb19705f24f993bcaa9d6e5c274ddc64d44ad9257279620a34f7b3906d392"} Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.238233 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.416661 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ceph\") pod \"956afa12-575f-40e0-af41-fbb3accd6ad5\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.416748 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ssh-key\") pod \"956afa12-575f-40e0-af41-fbb3accd6ad5\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.416982 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tvlr\" (UniqueName: \"kubernetes.io/projected/956afa12-575f-40e0-af41-fbb3accd6ad5-kube-api-access-4tvlr\") pod \"956afa12-575f-40e0-af41-fbb3accd6ad5\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.417109 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-inventory\") pod \"956afa12-575f-40e0-af41-fbb3accd6ad5\" (UID: \"956afa12-575f-40e0-af41-fbb3accd6ad5\") " Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.425505 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/956afa12-575f-40e0-af41-fbb3accd6ad5-kube-api-access-4tvlr" (OuterVolumeSpecName: "kube-api-access-4tvlr") pod "956afa12-575f-40e0-af41-fbb3accd6ad5" (UID: "956afa12-575f-40e0-af41-fbb3accd6ad5"). InnerVolumeSpecName "kube-api-access-4tvlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.452957 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ceph" (OuterVolumeSpecName: "ceph") pod "956afa12-575f-40e0-af41-fbb3accd6ad5" (UID: "956afa12-575f-40e0-af41-fbb3accd6ad5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.515808 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-inventory" (OuterVolumeSpecName: "inventory") pod "956afa12-575f-40e0-af41-fbb3accd6ad5" (UID: "956afa12-575f-40e0-af41-fbb3accd6ad5"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.530285 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tvlr\" (UniqueName: \"kubernetes.io/projected/956afa12-575f-40e0-af41-fbb3accd6ad5-kube-api-access-4tvlr\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.530348 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.530358 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.551688 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "956afa12-575f-40e0-af41-fbb3accd6ad5" (UID: "956afa12-575f-40e0-af41-fbb3accd6ad5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.632832 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/956afa12-575f-40e0-af41-fbb3accd6ad5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.789512 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" event={"ID":"956afa12-575f-40e0-af41-fbb3accd6ad5","Type":"ContainerDied","Data":"b66aead927dbde2cc6e6cac1c2f7ecbac800ea719c0a29bae0a64d29e4355177"} Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.789581 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b66aead927dbde2cc6e6cac1c2f7ecbac800ea719c0a29bae0a64d29e4355177" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.789582 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-m9fbt" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.864048 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-rqfjv"] Dec 05 08:43:00 crc kubenswrapper[4863]: E1205 08:43:00.864453 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956afa12-575f-40e0-af41-fbb3accd6ad5" containerName="configure-os-openstack-openstack-cell1" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.864486 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="956afa12-575f-40e0-af41-fbb3accd6ad5" containerName="configure-os-openstack-openstack-cell1" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.864701 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="956afa12-575f-40e0-af41-fbb3accd6ad5" containerName="configure-os-openstack-openstack-cell1" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.865406 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.867485 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.867648 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.867787 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.868578 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.880850 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-rqfjv"] Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.938024 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ceph\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.938067 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2qfn\" (UniqueName: \"kubernetes.io/projected/02944a5b-402f-48e5-9c09-f39051d9a0d4-kube-api-access-c2qfn\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.938319 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:00 crc kubenswrapper[4863]: I1205 08:43:00.938586 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-inventory-0\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.039748 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.039889 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-inventory-0\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.039953 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ceph\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.039982 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2qfn\" (UniqueName: \"kubernetes.io/projected/02944a5b-402f-48e5-9c09-f39051d9a0d4-kube-api-access-c2qfn\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.043513 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.044104 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-inventory-0\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.044121 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ceph\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.056057 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2qfn\" (UniqueName: \"kubernetes.io/projected/02944a5b-402f-48e5-9c09-f39051d9a0d4-kube-api-access-c2qfn\") pod \"ssh-known-hosts-openstack-rqfjv\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.185376 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.749796 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-rqfjv"] Dec 05 08:43:01 crc kubenswrapper[4863]: I1205 08:43:01.800267 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-rqfjv" event={"ID":"02944a5b-402f-48e5-9c09-f39051d9a0d4","Type":"ContainerStarted","Data":"f82b0dc72e2ca91defd8c5ad6b64e1d55ba987a5e65f52a6027d5b3105327927"} Dec 05 08:43:02 crc kubenswrapper[4863]: I1205 08:43:02.813514 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-rqfjv" event={"ID":"02944a5b-402f-48e5-9c09-f39051d9a0d4","Type":"ContainerStarted","Data":"edfc88b33ad775d5c11861068d28ccb91ff7478b0e37745122928760eb6410e2"} Dec 05 08:43:02 crc kubenswrapper[4863]: I1205 08:43:02.841278 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-rqfjv" podStartSLOduration=2.419305368 podStartE2EDuration="2.841254375s" podCreationTimestamp="2025-12-05 08:43:00 +0000 UTC" firstStartedPulling="2025-12-05 08:43:01.760012231 +0000 UTC m=+7009.486009271" lastFinishedPulling="2025-12-05 08:43:02.181961238 +0000 UTC m=+7009.907958278" observedRunningTime="2025-12-05 08:43:02.828448423 +0000 UTC m=+7010.554445463" watchObservedRunningTime="2025-12-05 08:43:02.841254375 +0000 UTC m=+7010.567251425" Dec 05 08:43:10 crc kubenswrapper[4863]: I1205 08:43:10.896903 4863 generic.go:334] "Generic (PLEG): container finished" podID="02944a5b-402f-48e5-9c09-f39051d9a0d4" containerID="edfc88b33ad775d5c11861068d28ccb91ff7478b0e37745122928760eb6410e2" exitCode=0 Dec 05 08:43:10 crc kubenswrapper[4863]: I1205 08:43:10.896986 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-rqfjv" event={"ID":"02944a5b-402f-48e5-9c09-f39051d9a0d4","Type":"ContainerDied","Data":"edfc88b33ad775d5c11861068d28ccb91ff7478b0e37745122928760eb6410e2"} Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.309235 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.370876 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-inventory-0\") pod \"02944a5b-402f-48e5-9c09-f39051d9a0d4\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.371061 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ssh-key-openstack-cell1\") pod \"02944a5b-402f-48e5-9c09-f39051d9a0d4\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.371109 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ceph\") pod \"02944a5b-402f-48e5-9c09-f39051d9a0d4\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.371139 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2qfn\" (UniqueName: \"kubernetes.io/projected/02944a5b-402f-48e5-9c09-f39051d9a0d4-kube-api-access-c2qfn\") pod \"02944a5b-402f-48e5-9c09-f39051d9a0d4\" (UID: \"02944a5b-402f-48e5-9c09-f39051d9a0d4\") " Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.378105 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02944a5b-402f-48e5-9c09-f39051d9a0d4-kube-api-access-c2qfn" (OuterVolumeSpecName: "kube-api-access-c2qfn") pod "02944a5b-402f-48e5-9c09-f39051d9a0d4" (UID: "02944a5b-402f-48e5-9c09-f39051d9a0d4"). InnerVolumeSpecName "kube-api-access-c2qfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.379615 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ceph" (OuterVolumeSpecName: "ceph") pod "02944a5b-402f-48e5-9c09-f39051d9a0d4" (UID: "02944a5b-402f-48e5-9c09-f39051d9a0d4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.403936 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "02944a5b-402f-48e5-9c09-f39051d9a0d4" (UID: "02944a5b-402f-48e5-9c09-f39051d9a0d4"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.409718 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "02944a5b-402f-48e5-9c09-f39051d9a0d4" (UID: "02944a5b-402f-48e5-9c09-f39051d9a0d4"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.474891 4863 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.474951 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.474966 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02944a5b-402f-48e5-9c09-f39051d9a0d4-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.474976 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2qfn\" (UniqueName: \"kubernetes.io/projected/02944a5b-402f-48e5-9c09-f39051d9a0d4-kube-api-access-c2qfn\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.920323 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-rqfjv" event={"ID":"02944a5b-402f-48e5-9c09-f39051d9a0d4","Type":"ContainerDied","Data":"f82b0dc72e2ca91defd8c5ad6b64e1d55ba987a5e65f52a6027d5b3105327927"} Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.920903 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f82b0dc72e2ca91defd8c5ad6b64e1d55ba987a5e65f52a6027d5b3105327927" Dec 05 08:43:12 crc kubenswrapper[4863]: I1205 08:43:12.921109 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-rqfjv" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.007014 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-chtq4"] Dec 05 08:43:13 crc kubenswrapper[4863]: E1205 08:43:13.007410 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02944a5b-402f-48e5-9c09-f39051d9a0d4" containerName="ssh-known-hosts-openstack" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.007427 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="02944a5b-402f-48e5-9c09-f39051d9a0d4" containerName="ssh-known-hosts-openstack" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.007703 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="02944a5b-402f-48e5-9c09-f39051d9a0d4" containerName="ssh-known-hosts-openstack" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.008420 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.010941 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.010997 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.011265 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.017157 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.018355 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-chtq4"] Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.092454 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-inventory\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.092550 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ssh-key\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.092608 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmz4h\" (UniqueName: \"kubernetes.io/projected/4e547abc-412f-415b-ae64-4df536300d7d-kube-api-access-tmz4h\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.092638 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ceph\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.194052 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ceph\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.194259 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-inventory\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.194337 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ssh-key\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.194397 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmz4h\" (UniqueName: \"kubernetes.io/projected/4e547abc-412f-415b-ae64-4df536300d7d-kube-api-access-tmz4h\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.199843 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-inventory\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.199924 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ssh-key\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.200167 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ceph\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.212967 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmz4h\" (UniqueName: \"kubernetes.io/projected/4e547abc-412f-415b-ae64-4df536300d7d-kube-api-access-tmz4h\") pod \"run-os-openstack-openstack-cell1-chtq4\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.327285 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.875126 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-chtq4"] Dec 05 08:43:13 crc kubenswrapper[4863]: I1205 08:43:13.934338 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-chtq4" event={"ID":"4e547abc-412f-415b-ae64-4df536300d7d","Type":"ContainerStarted","Data":"524991f6d19f1e0db485f2924ead2e418c535632302e78c9a6c99b0368e770a2"} Dec 05 08:43:14 crc kubenswrapper[4863]: I1205 08:43:14.945400 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-chtq4" event={"ID":"4e547abc-412f-415b-ae64-4df536300d7d","Type":"ContainerStarted","Data":"3fea2b7fd764b7bf271d05d8fd2865e96583b0e1007dc1db1ada56e6c1cf9f5e"} Dec 05 08:43:14 crc kubenswrapper[4863]: I1205 08:43:14.981810 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-chtq4" podStartSLOduration=2.501477919 podStartE2EDuration="2.981789264s" podCreationTimestamp="2025-12-05 08:43:12 +0000 UTC" firstStartedPulling="2025-12-05 08:43:13.874320553 +0000 UTC m=+7021.600317593" lastFinishedPulling="2025-12-05 08:43:14.354631898 +0000 UTC m=+7022.080628938" observedRunningTime="2025-12-05 08:43:14.962766031 +0000 UTC m=+7022.688763081" watchObservedRunningTime="2025-12-05 08:43:14.981789264 +0000 UTC m=+7022.707786304" Dec 05 08:43:24 crc kubenswrapper[4863]: I1205 08:43:24.028429 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e547abc-412f-415b-ae64-4df536300d7d" containerID="3fea2b7fd764b7bf271d05d8fd2865e96583b0e1007dc1db1ada56e6c1cf9f5e" exitCode=0 Dec 05 08:43:24 crc kubenswrapper[4863]: I1205 08:43:24.028521 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-chtq4" event={"ID":"4e547abc-412f-415b-ae64-4df536300d7d","Type":"ContainerDied","Data":"3fea2b7fd764b7bf271d05d8fd2865e96583b0e1007dc1db1ada56e6c1cf9f5e"} Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.504374 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.586788 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ssh-key\") pod \"4e547abc-412f-415b-ae64-4df536300d7d\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.586843 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-inventory\") pod \"4e547abc-412f-415b-ae64-4df536300d7d\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.587057 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmz4h\" (UniqueName: \"kubernetes.io/projected/4e547abc-412f-415b-ae64-4df536300d7d-kube-api-access-tmz4h\") pod \"4e547abc-412f-415b-ae64-4df536300d7d\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.587120 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ceph\") pod \"4e547abc-412f-415b-ae64-4df536300d7d\" (UID: \"4e547abc-412f-415b-ae64-4df536300d7d\") " Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.592042 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e547abc-412f-415b-ae64-4df536300d7d-kube-api-access-tmz4h" (OuterVolumeSpecName: "kube-api-access-tmz4h") pod "4e547abc-412f-415b-ae64-4df536300d7d" (UID: "4e547abc-412f-415b-ae64-4df536300d7d"). InnerVolumeSpecName "kube-api-access-tmz4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.598784 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ceph" (OuterVolumeSpecName: "ceph") pod "4e547abc-412f-415b-ae64-4df536300d7d" (UID: "4e547abc-412f-415b-ae64-4df536300d7d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.613048 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-inventory" (OuterVolumeSpecName: "inventory") pod "4e547abc-412f-415b-ae64-4df536300d7d" (UID: "4e547abc-412f-415b-ae64-4df536300d7d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.616920 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4e547abc-412f-415b-ae64-4df536300d7d" (UID: "4e547abc-412f-415b-ae64-4df536300d7d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.690438 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmz4h\" (UniqueName: \"kubernetes.io/projected/4e547abc-412f-415b-ae64-4df536300d7d-kube-api-access-tmz4h\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.690482 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.690498 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:25 crc kubenswrapper[4863]: I1205 08:43:25.690509 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e547abc-412f-415b-ae64-4df536300d7d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.052189 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-chtq4" event={"ID":"4e547abc-412f-415b-ae64-4df536300d7d","Type":"ContainerDied","Data":"524991f6d19f1e0db485f2924ead2e418c535632302e78c9a6c99b0368e770a2"} Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.052236 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="524991f6d19f1e0db485f2924ead2e418c535632302e78c9a6c99b0368e770a2" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.052246 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-chtq4" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.120719 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-9pbtw"] Dec 05 08:43:26 crc kubenswrapper[4863]: E1205 08:43:26.121398 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e547abc-412f-415b-ae64-4df536300d7d" containerName="run-os-openstack-openstack-cell1" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.121421 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e547abc-412f-415b-ae64-4df536300d7d" containerName="run-os-openstack-openstack-cell1" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.121721 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e547abc-412f-415b-ae64-4df536300d7d" containerName="run-os-openstack-openstack-cell1" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.122466 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.125198 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.126907 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.127053 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.127217 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.151040 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-9pbtw"] Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.202972 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxzbh\" (UniqueName: \"kubernetes.io/projected/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-kube-api-access-xxzbh\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.203031 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-inventory\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.203071 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.203128 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ceph\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.304528 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxzbh\" (UniqueName: \"kubernetes.io/projected/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-kube-api-access-xxzbh\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.304583 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-inventory\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.304626 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.304689 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ceph\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.322239 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ceph\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.328028 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-inventory\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.347059 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.376531 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxzbh\" (UniqueName: \"kubernetes.io/projected/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-kube-api-access-xxzbh\") pod \"reboot-os-openstack-openstack-cell1-9pbtw\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:26 crc kubenswrapper[4863]: I1205 08:43:26.459874 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:27 crc kubenswrapper[4863]: I1205 08:43:27.017989 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-9pbtw"] Dec 05 08:43:27 crc kubenswrapper[4863]: I1205 08:43:27.061577 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" event={"ID":"20f4cfb0-2e67-48ce-9e9c-dda9ef756746","Type":"ContainerStarted","Data":"d85a6890eec3adba7895e188ebf19a3c729ac247f94bda0023481acb2468a7ab"} Dec 05 08:43:29 crc kubenswrapper[4863]: I1205 08:43:29.084634 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" event={"ID":"20f4cfb0-2e67-48ce-9e9c-dda9ef756746","Type":"ContainerStarted","Data":"105a5131aeb69ac24ffb3fe66c16be30adca8d1b3c1cd99dd549c21e9581b6a1"} Dec 05 08:43:29 crc kubenswrapper[4863]: I1205 08:43:29.108183 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" podStartSLOduration=1.673965233 podStartE2EDuration="3.108165255s" podCreationTimestamp="2025-12-05 08:43:26 +0000 UTC" firstStartedPulling="2025-12-05 08:43:27.02102767 +0000 UTC m=+7034.747024710" lastFinishedPulling="2025-12-05 08:43:28.455227692 +0000 UTC m=+7036.181224732" observedRunningTime="2025-12-05 08:43:29.103368008 +0000 UTC m=+7036.829365048" watchObservedRunningTime="2025-12-05 08:43:29.108165255 +0000 UTC m=+7036.834162295" Dec 05 08:43:38 crc kubenswrapper[4863]: I1205 08:43:38.464571 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:43:38 crc kubenswrapper[4863]: I1205 08:43:38.465393 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:43:44 crc kubenswrapper[4863]: I1205 08:43:44.225141 4863 generic.go:334] "Generic (PLEG): container finished" podID="20f4cfb0-2e67-48ce-9e9c-dda9ef756746" containerID="105a5131aeb69ac24ffb3fe66c16be30adca8d1b3c1cd99dd549c21e9581b6a1" exitCode=0 Dec 05 08:43:44 crc kubenswrapper[4863]: I1205 08:43:44.225203 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" event={"ID":"20f4cfb0-2e67-48ce-9e9c-dda9ef756746","Type":"ContainerDied","Data":"105a5131aeb69ac24ffb3fe66c16be30adca8d1b3c1cd99dd549c21e9581b6a1"} Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.752007 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.901762 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxzbh\" (UniqueName: \"kubernetes.io/projected/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-kube-api-access-xxzbh\") pod \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.902089 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ssh-key\") pod \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.902156 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-inventory\") pod \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.902233 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ceph\") pod \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\" (UID: \"20f4cfb0-2e67-48ce-9e9c-dda9ef756746\") " Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.907570 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ceph" (OuterVolumeSpecName: "ceph") pod "20f4cfb0-2e67-48ce-9e9c-dda9ef756746" (UID: "20f4cfb0-2e67-48ce-9e9c-dda9ef756746"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.907760 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-kube-api-access-xxzbh" (OuterVolumeSpecName: "kube-api-access-xxzbh") pod "20f4cfb0-2e67-48ce-9e9c-dda9ef756746" (UID: "20f4cfb0-2e67-48ce-9e9c-dda9ef756746"). InnerVolumeSpecName "kube-api-access-xxzbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.930791 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "20f4cfb0-2e67-48ce-9e9c-dda9ef756746" (UID: "20f4cfb0-2e67-48ce-9e9c-dda9ef756746"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:45 crc kubenswrapper[4863]: I1205 08:43:45.932366 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-inventory" (OuterVolumeSpecName: "inventory") pod "20f4cfb0-2e67-48ce-9e9c-dda9ef756746" (UID: "20f4cfb0-2e67-48ce-9e9c-dda9ef756746"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.004554 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.004585 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxzbh\" (UniqueName: \"kubernetes.io/projected/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-kube-api-access-xxzbh\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.004597 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.004607 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20f4cfb0-2e67-48ce-9e9c-dda9ef756746-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.250502 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" event={"ID":"20f4cfb0-2e67-48ce-9e9c-dda9ef756746","Type":"ContainerDied","Data":"d85a6890eec3adba7895e188ebf19a3c729ac247f94bda0023481acb2468a7ab"} Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.250555 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d85a6890eec3adba7895e188ebf19a3c729ac247f94bda0023481acb2468a7ab" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.250594 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-9pbtw" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.363597 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-zfd9m"] Dec 05 08:43:46 crc kubenswrapper[4863]: E1205 08:43:46.364354 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20f4cfb0-2e67-48ce-9e9c-dda9ef756746" containerName="reboot-os-openstack-openstack-cell1" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.364372 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="20f4cfb0-2e67-48ce-9e9c-dda9ef756746" containerName="reboot-os-openstack-openstack-cell1" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.364580 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="20f4cfb0-2e67-48ce-9e9c-dda9ef756746" containerName="reboot-os-openstack-openstack-cell1" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.365314 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.369949 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.370126 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.370338 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.370916 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-zfd9m"] Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.376617 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.512514 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ceph\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.512569 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.512618 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.512822 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.512907 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvlxn\" (UniqueName: \"kubernetes.io/projected/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-kube-api-access-qvlxn\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.513060 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-nova-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.513140 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.513172 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ssh-key\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.513219 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.513399 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-inventory\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.513658 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.513713 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.615763 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.615840 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.615880 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ceph\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.615911 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.615958 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.616024 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.616069 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvlxn\" (UniqueName: \"kubernetes.io/projected/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-kube-api-access-qvlxn\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.616237 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.616366 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.616425 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ssh-key\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.616809 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.616956 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-inventory\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.621519 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-inventory\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.621906 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.621999 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.622136 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ceph\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.622581 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ssh-key\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.622654 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " 
pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.623081 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.623358 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.627105 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.627323 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.629603 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.638807 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvlxn\" (UniqueName: \"kubernetes.io/projected/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-kube-api-access-qvlxn\") pod \"install-certs-openstack-openstack-cell1-zfd9m\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:46 crc kubenswrapper[4863]: I1205 08:43:46.697345 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:43:47 crc kubenswrapper[4863]: I1205 08:43:47.209783 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-zfd9m"] Dec 05 08:43:47 crc kubenswrapper[4863]: I1205 08:43:47.259610 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" event={"ID":"54d41b89-e5bc-4b3a-b675-78b3eed49d1d","Type":"ContainerStarted","Data":"56a0371830c8b61eb46d855fb845ba45296d046cf68667ad4baf83a8dc7328ee"} Dec 05 08:43:48 crc kubenswrapper[4863]: I1205 08:43:48.268421 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" event={"ID":"54d41b89-e5bc-4b3a-b675-78b3eed49d1d","Type":"ContainerStarted","Data":"c79865f50850ed4b581d23c7e27d26c598dea5bfcd36a9b3dbc1f0fcd6309526"} Dec 05 08:43:48 crc kubenswrapper[4863]: I1205 08:43:48.296333 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" podStartSLOduration=1.9254090019999999 podStartE2EDuration="2.296314435s" podCreationTimestamp="2025-12-05 08:43:46 +0000 UTC" firstStartedPulling="2025-12-05 08:43:47.218075506 +0000 UTC m=+7054.944072546" lastFinishedPulling="2025-12-05 08:43:47.588980919 +0000 UTC m=+7055.314977979" observedRunningTime="2025-12-05 08:43:48.288943636 +0000 UTC m=+7056.014940676" watchObservedRunningTime="2025-12-05 08:43:48.296314435 +0000 UTC m=+7056.022311475" Dec 05 08:44:06 crc kubenswrapper[4863]: I1205 08:44:06.446269 4863 generic.go:334] "Generic (PLEG): container finished" podID="54d41b89-e5bc-4b3a-b675-78b3eed49d1d" containerID="c79865f50850ed4b581d23c7e27d26c598dea5bfcd36a9b3dbc1f0fcd6309526" exitCode=0 Dec 05 08:44:06 crc kubenswrapper[4863]: I1205 08:44:06.446369 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" event={"ID":"54d41b89-e5bc-4b3a-b675-78b3eed49d1d","Type":"ContainerDied","Data":"c79865f50850ed4b581d23c7e27d26c598dea5bfcd36a9b3dbc1f0fcd6309526"} Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.003329 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.079432 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-inventory\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.079883 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-sriov-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.079907 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvlxn\" (UniqueName: \"kubernetes.io/projected/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-kube-api-access-qvlxn\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.079923 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ssh-key\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.079976 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ceph\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.080011 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-bootstrap-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.080033 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-telemetry-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.081167 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-metadata-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.081319 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-dhcp-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.081728 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-nova-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.081779 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-libvirt-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.081801 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ovn-combined-ca-bundle\") pod \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\" (UID: \"54d41b89-e5bc-4b3a-b675-78b3eed49d1d\") " Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.085979 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.086801 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.087406 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.087602 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ceph" (OuterVolumeSpecName: "ceph") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.087680 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.087852 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.088254 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.088955 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.089712 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-kube-api-access-qvlxn" (OuterVolumeSpecName: "kube-api-access-qvlxn") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "kube-api-access-qvlxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.089721 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.115768 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.116003 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-inventory" (OuterVolumeSpecName: "inventory") pod "54d41b89-e5bc-4b3a-b675-78b3eed49d1d" (UID: "54d41b89-e5bc-4b3a-b675-78b3eed49d1d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185777 4863 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185811 4863 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185823 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185833 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185864 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185874 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvlxn\" (UniqueName: \"kubernetes.io/projected/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-kube-api-access-qvlxn\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185883 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185892 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185901 4863 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185908 4863 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185917 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.185927 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54d41b89-e5bc-4b3a-b675-78b3eed49d1d-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.463774 4863 patch_prober.go:28] interesting 
pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.463839 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.466131 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" event={"ID":"54d41b89-e5bc-4b3a-b675-78b3eed49d1d","Type":"ContainerDied","Data":"56a0371830c8b61eb46d855fb845ba45296d046cf68667ad4baf83a8dc7328ee"} Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.466169 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56a0371830c8b61eb46d855fb845ba45296d046cf68667ad4baf83a8dc7328ee" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.466194 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-zfd9m" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.546588 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tftgl"] Dec 05 08:44:08 crc kubenswrapper[4863]: E1205 08:44:08.546982 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54d41b89-e5bc-4b3a-b675-78b3eed49d1d" containerName="install-certs-openstack-openstack-cell1" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.546999 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="54d41b89-e5bc-4b3a-b675-78b3eed49d1d" containerName="install-certs-openstack-openstack-cell1" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.547207 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="54d41b89-e5bc-4b3a-b675-78b3eed49d1d" containerName="install-certs-openstack-openstack-cell1" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.547947 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.550171 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.550321 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.550400 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.551990 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.560112 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tftgl"] Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.694334 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjwkm\" (UniqueName: \"kubernetes.io/projected/fbf27c93-62df-46a6-95c8-a3438e563849-kube-api-access-fjwkm\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.694410 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-inventory\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.694836 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ceph\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.695169 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.797490 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ceph\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.797754 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.797894 4863 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjwkm\" (UniqueName: \"kubernetes.io/projected/fbf27c93-62df-46a6-95c8-a3438e563849-kube-api-access-fjwkm\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.797988 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-inventory\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.801940 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-inventory\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.802005 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.802651 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ceph\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.816302 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjwkm\" (UniqueName: \"kubernetes.io/projected/fbf27c93-62df-46a6-95c8-a3438e563849-kube-api-access-fjwkm\") pod \"ceph-client-openstack-openstack-cell1-tftgl\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:08 crc kubenswrapper[4863]: I1205 08:44:08.864303 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:09 crc kubenswrapper[4863]: I1205 08:44:09.392079 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tftgl"] Dec 05 08:44:09 crc kubenswrapper[4863]: I1205 08:44:09.474951 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" event={"ID":"fbf27c93-62df-46a6-95c8-a3438e563849","Type":"ContainerStarted","Data":"159df4cdd1c9bea20df1d0a3a2822a27e2717f3935db8e2311c71ef3a773e5dd"} Dec 05 08:44:10 crc kubenswrapper[4863]: I1205 08:44:10.485058 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" event={"ID":"fbf27c93-62df-46a6-95c8-a3438e563849","Type":"ContainerStarted","Data":"55b2e4b52632f44abbdeb766e950fef0ef9d7c9b84ccc18547993a5850d85ee9"} Dec 05 08:44:10 crc kubenswrapper[4863]: I1205 08:44:10.504613 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" podStartSLOduration=2.07230425 podStartE2EDuration="2.504595929s" podCreationTimestamp="2025-12-05 08:44:08 +0000 UTC" firstStartedPulling="2025-12-05 08:44:09.396300227 +0000 UTC m=+7077.122297267" lastFinishedPulling="2025-12-05 08:44:09.828591906 +0000 UTC m=+7077.554588946" observedRunningTime="2025-12-05 08:44:10.500640763 +0000 UTC m=+7078.226637803" watchObservedRunningTime="2025-12-05 08:44:10.504595929 +0000 UTC m=+7078.230592969" Dec 05 08:44:15 crc kubenswrapper[4863]: I1205 08:44:15.535127 4863 generic.go:334] "Generic (PLEG): container finished" podID="fbf27c93-62df-46a6-95c8-a3438e563849" containerID="55b2e4b52632f44abbdeb766e950fef0ef9d7c9b84ccc18547993a5850d85ee9" exitCode=0 Dec 05 08:44:15 crc kubenswrapper[4863]: I1205 08:44:15.535205 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" event={"ID":"fbf27c93-62df-46a6-95c8-a3438e563849","Type":"ContainerDied","Data":"55b2e4b52632f44abbdeb766e950fef0ef9d7c9b84ccc18547993a5850d85ee9"} Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.023530 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.082830 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ssh-key\") pod \"fbf27c93-62df-46a6-95c8-a3438e563849\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.082940 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjwkm\" (UniqueName: \"kubernetes.io/projected/fbf27c93-62df-46a6-95c8-a3438e563849-kube-api-access-fjwkm\") pod \"fbf27c93-62df-46a6-95c8-a3438e563849\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.084170 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ceph\") pod \"fbf27c93-62df-46a6-95c8-a3438e563849\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.084400 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-inventory\") pod \"fbf27c93-62df-46a6-95c8-a3438e563849\" (UID: \"fbf27c93-62df-46a6-95c8-a3438e563849\") " Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.096208 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbf27c93-62df-46a6-95c8-a3438e563849-kube-api-access-fjwkm" (OuterVolumeSpecName: "kube-api-access-fjwkm") pod "fbf27c93-62df-46a6-95c8-a3438e563849" (UID: "fbf27c93-62df-46a6-95c8-a3438e563849"). InnerVolumeSpecName "kube-api-access-fjwkm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.112445 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ceph" (OuterVolumeSpecName: "ceph") pod "fbf27c93-62df-46a6-95c8-a3438e563849" (UID: "fbf27c93-62df-46a6-95c8-a3438e563849"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.119538 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fbf27c93-62df-46a6-95c8-a3438e563849" (UID: "fbf27c93-62df-46a6-95c8-a3438e563849"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.120861 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-inventory" (OuterVolumeSpecName: "inventory") pod "fbf27c93-62df-46a6-95c8-a3438e563849" (UID: "fbf27c93-62df-46a6-95c8-a3438e563849"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.187109 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.187177 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjwkm\" (UniqueName: \"kubernetes.io/projected/fbf27c93-62df-46a6-95c8-a3438e563849-kube-api-access-fjwkm\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.187193 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.187204 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf27c93-62df-46a6-95c8-a3438e563849-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.554733 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" event={"ID":"fbf27c93-62df-46a6-95c8-a3438e563849","Type":"ContainerDied","Data":"159df4cdd1c9bea20df1d0a3a2822a27e2717f3935db8e2311c71ef3a773e5dd"} Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.554777 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="159df4cdd1c9bea20df1d0a3a2822a27e2717f3935db8e2311c71ef3a773e5dd" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.554846 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tftgl" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.629774 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-l64s5"] Dec 05 08:44:17 crc kubenswrapper[4863]: E1205 08:44:17.630209 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf27c93-62df-46a6-95c8-a3438e563849" containerName="ceph-client-openstack-openstack-cell1" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.630227 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf27c93-62df-46a6-95c8-a3438e563849" containerName="ceph-client-openstack-openstack-cell1" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.630412 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf27c93-62df-46a6-95c8-a3438e563849" containerName="ceph-client-openstack-openstack-cell1" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.631146 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.634034 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.634275 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.634515 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.635283 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.635707 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.647942 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-l64s5"] Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.696741 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj8ck\" (UniqueName: \"kubernetes.io/projected/ecd68f58-3555-4f34-98a7-f833c7b3514f-kube-api-access-dj8ck\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.696804 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.696844 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ceph\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.697335 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.697571 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ssh-key\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.697629 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-inventory\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: 
\"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.799093 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.799273 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ssh-key\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.799309 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-inventory\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.799400 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj8ck\" (UniqueName: \"kubernetes.io/projected/ecd68f58-3555-4f34-98a7-f833c7b3514f-kube-api-access-dj8ck\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.799444 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.799504 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ceph\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.800565 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.803417 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ceph\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.804591 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-inventory\") pod 
\"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.804852 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.805267 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ssh-key\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.826509 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj8ck\" (UniqueName: \"kubernetes.io/projected/ecd68f58-3555-4f34-98a7-f833c7b3514f-kube-api-access-dj8ck\") pod \"ovn-openstack-openstack-cell1-l64s5\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:17 crc kubenswrapper[4863]: I1205 08:44:17.957393 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:44:18 crc kubenswrapper[4863]: I1205 08:44:18.531309 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-l64s5"] Dec 05 08:44:18 crc kubenswrapper[4863]: I1205 08:44:18.569159 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-l64s5" event={"ID":"ecd68f58-3555-4f34-98a7-f833c7b3514f","Type":"ContainerStarted","Data":"260b0f21e8aa84835e4b07469d89af8622614fb0c88503415cf3c20bd1904c54"} Dec 05 08:44:19 crc kubenswrapper[4863]: I1205 08:44:19.579884 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-l64s5" event={"ID":"ecd68f58-3555-4f34-98a7-f833c7b3514f","Type":"ContainerStarted","Data":"31aac45b960071d5d50eebc3cea04bc1cf290db814e9857f2322ba5d4a4827f3"} Dec 05 08:44:19 crc kubenswrapper[4863]: I1205 08:44:19.616958 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-l64s5" podStartSLOduration=2.177507851 podStartE2EDuration="2.616933933s" podCreationTimestamp="2025-12-05 08:44:17 +0000 UTC" firstStartedPulling="2025-12-05 08:44:18.536819166 +0000 UTC m=+7086.262816236" lastFinishedPulling="2025-12-05 08:44:18.976245268 +0000 UTC m=+7086.702242318" observedRunningTime="2025-12-05 08:44:19.59508885 +0000 UTC m=+7087.321085910" watchObservedRunningTime="2025-12-05 08:44:19.616933933 +0000 UTC m=+7087.342930973" Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.463936 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.464408 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.464451 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.465203 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.465266 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" gracePeriod=600 Dec 05 08:44:38 crc kubenswrapper[4863]: E1205 08:44:38.592035 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.753218 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" exitCode=0 Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.753257 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296"} Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.753318 4863 scope.go:117] "RemoveContainer" containerID="03454eac9d11da1cd83a42f4459000476696e0445a5619adcb508c0c6e1b7ba9" Dec 05 08:44:38 crc kubenswrapper[4863]: I1205 08:44:38.754046 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:44:38 crc kubenswrapper[4863]: E1205 08:44:38.754445 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:44:51 crc kubenswrapper[4863]: I1205 08:44:51.601816 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:44:51 crc kubenswrapper[4863]: E1205 08:44:51.602662 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:44:55 crc kubenswrapper[4863]: I1205 08:44:55.770950 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jljjc"] Dec 05 08:44:55 crc kubenswrapper[4863]: I1205 08:44:55.774438 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:55 crc kubenswrapper[4863]: I1205 08:44:55.788578 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jljjc"] Dec 05 08:44:55 crc kubenswrapper[4863]: I1205 08:44:55.901162 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-utilities\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:55 crc kubenswrapper[4863]: I1205 08:44:55.901312 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hs2f\" (UniqueName: \"kubernetes.io/projected/01d9acb3-5c64-450f-879b-bdeec3d515f2-kube-api-access-7hs2f\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:55 crc kubenswrapper[4863]: I1205 08:44:55.901619 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-catalog-content\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.005899 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-utilities\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.006038 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hs2f\" (UniqueName: \"kubernetes.io/projected/01d9acb3-5c64-450f-879b-bdeec3d515f2-kube-api-access-7hs2f\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.006096 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-catalog-content\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.006569 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-utilities\") pod \"community-operators-jljjc\" 
(UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.006679 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-catalog-content\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.035599 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hs2f\" (UniqueName: \"kubernetes.io/projected/01d9acb3-5c64-450f-879b-bdeec3d515f2-kube-api-access-7hs2f\") pod \"community-operators-jljjc\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.095345 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.760567 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jljjc"] Dec 05 08:44:56 crc kubenswrapper[4863]: I1205 08:44:56.926565 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jljjc" event={"ID":"01d9acb3-5c64-450f-879b-bdeec3d515f2","Type":"ContainerStarted","Data":"2ae34226bd66eaf3ef497ea1d1b5cfb11cd12831b57361173372254ebb20d802"} Dec 05 08:44:57 crc kubenswrapper[4863]: I1205 08:44:57.940925 4863 generic.go:334] "Generic (PLEG): container finished" podID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerID="83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2" exitCode=0 Dec 05 08:44:57 crc kubenswrapper[4863]: I1205 08:44:57.941004 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jljjc" event={"ID":"01d9acb3-5c64-450f-879b-bdeec3d515f2","Type":"ContainerDied","Data":"83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2"} Dec 05 08:44:57 crc kubenswrapper[4863]: I1205 08:44:57.944569 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:44:58 crc kubenswrapper[4863]: I1205 08:44:58.951765 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jljjc" event={"ID":"01d9acb3-5c64-450f-879b-bdeec3d515f2","Type":"ContainerStarted","Data":"2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00"} Dec 05 08:44:59 crc kubenswrapper[4863]: I1205 08:44:59.962139 4863 generic.go:334] "Generic (PLEG): container finished" podID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerID="2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00" exitCode=0 Dec 05 08:44:59 crc kubenswrapper[4863]: I1205 08:44:59.962189 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jljjc" event={"ID":"01d9acb3-5c64-450f-879b-bdeec3d515f2","Type":"ContainerDied","Data":"2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00"} Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.167736 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t"] Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.169419 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.171863 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.172066 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.180623 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t"] Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.299762 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/88c64e5a-7c28-49a0-9c42-6b2596062da9-config-volume\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.299888 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ds56\" (UniqueName: \"kubernetes.io/projected/88c64e5a-7c28-49a0-9c42-6b2596062da9-kube-api-access-9ds56\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.300022 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/88c64e5a-7c28-49a0-9c42-6b2596062da9-secret-volume\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.401583 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/88c64e5a-7c28-49a0-9c42-6b2596062da9-config-volume\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.401994 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ds56\" (UniqueName: \"kubernetes.io/projected/88c64e5a-7c28-49a0-9c42-6b2596062da9-kube-api-access-9ds56\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.402123 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/88c64e5a-7c28-49a0-9c42-6b2596062da9-secret-volume\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.402553 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/88c64e5a-7c28-49a0-9c42-6b2596062da9-config-volume\") pod 
\"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.407621 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/88c64e5a-7c28-49a0-9c42-6b2596062da9-secret-volume\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.418828 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ds56\" (UniqueName: \"kubernetes.io/projected/88c64e5a-7c28-49a0-9c42-6b2596062da9-kube-api-access-9ds56\") pod \"collect-profiles-29415405-5h42t\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.486982 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:00 crc kubenswrapper[4863]: I1205 08:45:00.976719 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t"] Dec 05 08:45:01 crc kubenswrapper[4863]: I1205 08:45:01.983095 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jljjc" event={"ID":"01d9acb3-5c64-450f-879b-bdeec3d515f2","Type":"ContainerStarted","Data":"191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468"} Dec 05 08:45:01 crc kubenswrapper[4863]: I1205 08:45:01.985843 4863 generic.go:334] "Generic (PLEG): container finished" podID="88c64e5a-7c28-49a0-9c42-6b2596062da9" containerID="7f6b45be49405f6a5ec2501171db0cd253b94c8f42c5a50938c66cfdf6237420" exitCode=0 Dec 05 08:45:01 crc kubenswrapper[4863]: I1205 08:45:01.985878 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" event={"ID":"88c64e5a-7c28-49a0-9c42-6b2596062da9","Type":"ContainerDied","Data":"7f6b45be49405f6a5ec2501171db0cd253b94c8f42c5a50938c66cfdf6237420"} Dec 05 08:45:01 crc kubenswrapper[4863]: I1205 08:45:01.985905 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" event={"ID":"88c64e5a-7c28-49a0-9c42-6b2596062da9","Type":"ContainerStarted","Data":"0822c2db007c165cb3a7b749195b1c91a3ad3893b106cf93440df571d9b214f6"} Dec 05 08:45:02 crc kubenswrapper[4863]: I1205 08:45:02.007215 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jljjc" podStartSLOduration=3.581145625 podStartE2EDuration="7.007195024s" podCreationTimestamp="2025-12-05 08:44:55 +0000 UTC" firstStartedPulling="2025-12-05 08:44:57.943506245 +0000 UTC m=+7125.669503285" lastFinishedPulling="2025-12-05 08:45:01.369555644 +0000 UTC m=+7129.095552684" observedRunningTime="2025-12-05 08:45:01.997627341 +0000 UTC m=+7129.723624411" watchObservedRunningTime="2025-12-05 08:45:02.007195024 +0000 UTC m=+7129.733192064" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.398498 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.568668 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/88c64e5a-7c28-49a0-9c42-6b2596062da9-secret-volume\") pod \"88c64e5a-7c28-49a0-9c42-6b2596062da9\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.568975 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/88c64e5a-7c28-49a0-9c42-6b2596062da9-config-volume\") pod \"88c64e5a-7c28-49a0-9c42-6b2596062da9\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.569032 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ds56\" (UniqueName: \"kubernetes.io/projected/88c64e5a-7c28-49a0-9c42-6b2596062da9-kube-api-access-9ds56\") pod \"88c64e5a-7c28-49a0-9c42-6b2596062da9\" (UID: \"88c64e5a-7c28-49a0-9c42-6b2596062da9\") " Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.569712 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88c64e5a-7c28-49a0-9c42-6b2596062da9-config-volume" (OuterVolumeSpecName: "config-volume") pod "88c64e5a-7c28-49a0-9c42-6b2596062da9" (UID: "88c64e5a-7c28-49a0-9c42-6b2596062da9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.578672 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88c64e5a-7c28-49a0-9c42-6b2596062da9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "88c64e5a-7c28-49a0-9c42-6b2596062da9" (UID: "88c64e5a-7c28-49a0-9c42-6b2596062da9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.579534 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88c64e5a-7c28-49a0-9c42-6b2596062da9-kube-api-access-9ds56" (OuterVolumeSpecName: "kube-api-access-9ds56") pod "88c64e5a-7c28-49a0-9c42-6b2596062da9" (UID: "88c64e5a-7c28-49a0-9c42-6b2596062da9"). InnerVolumeSpecName "kube-api-access-9ds56". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.603166 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:45:03 crc kubenswrapper[4863]: E1205 08:45:03.603961 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.672232 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/88c64e5a-7c28-49a0-9c42-6b2596062da9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.672288 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/88c64e5a-7c28-49a0-9c42-6b2596062da9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4863]: I1205 08:45:03.672299 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ds56\" (UniqueName: \"kubernetes.io/projected/88c64e5a-7c28-49a0-9c42-6b2596062da9-kube-api-access-9ds56\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:04 crc kubenswrapper[4863]: I1205 08:45:04.004924 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" event={"ID":"88c64e5a-7c28-49a0-9c42-6b2596062da9","Type":"ContainerDied","Data":"0822c2db007c165cb3a7b749195b1c91a3ad3893b106cf93440df571d9b214f6"} Dec 05 08:45:04 crc kubenswrapper[4863]: I1205 08:45:04.004973 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0822c2db007c165cb3a7b749195b1c91a3ad3893b106cf93440df571d9b214f6" Dec 05 08:45:04 crc kubenswrapper[4863]: I1205 08:45:04.005016 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t" Dec 05 08:45:04 crc kubenswrapper[4863]: I1205 08:45:04.476609 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc"] Dec 05 08:45:04 crc kubenswrapper[4863]: I1205 08:45:04.484424 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415360-tt5vc"] Dec 05 08:45:04 crc kubenswrapper[4863]: I1205 08:45:04.615280 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbce532d-1dd2-4f9e-8c6b-b44987ee5d37" path="/var/lib/kubelet/pods/dbce532d-1dd2-4f9e-8c6b-b44987ee5d37/volumes" Dec 05 08:45:06 crc kubenswrapper[4863]: I1205 08:45:06.096504 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:45:06 crc kubenswrapper[4863]: I1205 08:45:06.096878 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:45:06 crc kubenswrapper[4863]: I1205 08:45:06.143912 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:45:07 crc kubenswrapper[4863]: I1205 08:45:07.119782 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:45:07 crc kubenswrapper[4863]: I1205 08:45:07.170207 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jljjc"] Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.052540 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jljjc" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="registry-server" containerID="cri-o://191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468" gracePeriod=2 Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.683979 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.819527 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-utilities\") pod \"01d9acb3-5c64-450f-879b-bdeec3d515f2\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.819621 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-catalog-content\") pod \"01d9acb3-5c64-450f-879b-bdeec3d515f2\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.819700 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hs2f\" (UniqueName: \"kubernetes.io/projected/01d9acb3-5c64-450f-879b-bdeec3d515f2-kube-api-access-7hs2f\") pod \"01d9acb3-5c64-450f-879b-bdeec3d515f2\" (UID: \"01d9acb3-5c64-450f-879b-bdeec3d515f2\") " Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.820569 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-utilities" (OuterVolumeSpecName: "utilities") pod "01d9acb3-5c64-450f-879b-bdeec3d515f2" (UID: "01d9acb3-5c64-450f-879b-bdeec3d515f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.830681 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01d9acb3-5c64-450f-879b-bdeec3d515f2-kube-api-access-7hs2f" (OuterVolumeSpecName: "kube-api-access-7hs2f") pod "01d9acb3-5c64-450f-879b-bdeec3d515f2" (UID: "01d9acb3-5c64-450f-879b-bdeec3d515f2"). InnerVolumeSpecName "kube-api-access-7hs2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.866914 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01d9acb3-5c64-450f-879b-bdeec3d515f2" (UID: "01d9acb3-5c64-450f-879b-bdeec3d515f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.923242 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.923311 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d9acb3-5c64-450f-879b-bdeec3d515f2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:09 crc kubenswrapper[4863]: I1205 08:45:09.923333 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hs2f\" (UniqueName: \"kubernetes.io/projected/01d9acb3-5c64-450f-879b-bdeec3d515f2-kube-api-access-7hs2f\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.064947 4863 generic.go:334] "Generic (PLEG): container finished" podID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerID="191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468" exitCode=0 Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.064995 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jljjc" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.065008 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jljjc" event={"ID":"01d9acb3-5c64-450f-879b-bdeec3d515f2","Type":"ContainerDied","Data":"191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468"} Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.065083 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jljjc" event={"ID":"01d9acb3-5c64-450f-879b-bdeec3d515f2","Type":"ContainerDied","Data":"2ae34226bd66eaf3ef497ea1d1b5cfb11cd12831b57361173372254ebb20d802"} Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.065115 4863 scope.go:117] "RemoveContainer" containerID="191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.087166 4863 scope.go:117] "RemoveContainer" containerID="2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.110903 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jljjc"] Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.120769 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jljjc"] Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.133414 4863 scope.go:117] "RemoveContainer" containerID="83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.185219 4863 scope.go:117] "RemoveContainer" containerID="191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468" Dec 05 08:45:10 crc kubenswrapper[4863]: E1205 08:45:10.186058 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468\": container with ID starting with 191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468 not found: ID does not exist" containerID="191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.186113 
4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468"} err="failed to get container status \"191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468\": rpc error: code = NotFound desc = could not find container \"191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468\": container with ID starting with 191671675bea4b2ee0b9653da7dc8a0382dea9dd60b5e837423cdb85d62f4468 not found: ID does not exist" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.186145 4863 scope.go:117] "RemoveContainer" containerID="2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00" Dec 05 08:45:10 crc kubenswrapper[4863]: E1205 08:45:10.186597 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00\": container with ID starting with 2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00 not found: ID does not exist" containerID="2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.186626 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00"} err="failed to get container status \"2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00\": rpc error: code = NotFound desc = could not find container \"2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00\": container with ID starting with 2c506f8ab3bb53d6cdd2186e8ee1dec1fe2f4f823e6dce7575c1567a1e908b00 not found: ID does not exist" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.186639 4863 scope.go:117] "RemoveContainer" containerID="83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2" Dec 05 08:45:10 crc kubenswrapper[4863]: E1205 08:45:10.186878 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2\": container with ID starting with 83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2 not found: ID does not exist" containerID="83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.186901 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2"} err="failed to get container status \"83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2\": rpc error: code = NotFound desc = could not find container \"83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2\": container with ID starting with 83fa63e467abbbe402824752250e105a1c5a8c9f965accfda57106041335afc2 not found: ID does not exist" Dec 05 08:45:10 crc kubenswrapper[4863]: I1205 08:45:10.629426 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" path="/var/lib/kubelet/pods/01d9acb3-5c64-450f-879b-bdeec3d515f2/volumes" Dec 05 08:45:17 crc kubenswrapper[4863]: I1205 08:45:17.602963 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:45:17 crc kubenswrapper[4863]: E1205 08:45:17.604403 4863 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:45:22 crc kubenswrapper[4863]: I1205 08:45:22.192075 4863 generic.go:334] "Generic (PLEG): container finished" podID="ecd68f58-3555-4f34-98a7-f833c7b3514f" containerID="31aac45b960071d5d50eebc3cea04bc1cf290db814e9857f2322ba5d4a4827f3" exitCode=0 Dec 05 08:45:22 crc kubenswrapper[4863]: I1205 08:45:22.192179 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-l64s5" event={"ID":"ecd68f58-3555-4f34-98a7-f833c7b3514f","Type":"ContainerDied","Data":"31aac45b960071d5d50eebc3cea04bc1cf290db814e9857f2322ba5d4a4827f3"} Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.659800 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.791276 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ssh-key\") pod \"ecd68f58-3555-4f34-98a7-f833c7b3514f\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.791319 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-inventory\") pod \"ecd68f58-3555-4f34-98a7-f833c7b3514f\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.791372 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj8ck\" (UniqueName: \"kubernetes.io/projected/ecd68f58-3555-4f34-98a7-f833c7b3514f-kube-api-access-dj8ck\") pod \"ecd68f58-3555-4f34-98a7-f833c7b3514f\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.791418 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovncontroller-config-0\") pod \"ecd68f58-3555-4f34-98a7-f833c7b3514f\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.791496 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovn-combined-ca-bundle\") pod \"ecd68f58-3555-4f34-98a7-f833c7b3514f\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.791597 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ceph\") pod \"ecd68f58-3555-4f34-98a7-f833c7b3514f\" (UID: \"ecd68f58-3555-4f34-98a7-f833c7b3514f\") " Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.797429 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecd68f58-3555-4f34-98a7-f833c7b3514f-kube-api-access-dj8ck" (OuterVolumeSpecName: "kube-api-access-dj8ck") pod 
"ecd68f58-3555-4f34-98a7-f833c7b3514f" (UID: "ecd68f58-3555-4f34-98a7-f833c7b3514f"). InnerVolumeSpecName "kube-api-access-dj8ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.797834 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ecd68f58-3555-4f34-98a7-f833c7b3514f" (UID: "ecd68f58-3555-4f34-98a7-f833c7b3514f"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.798678 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ceph" (OuterVolumeSpecName: "ceph") pod "ecd68f58-3555-4f34-98a7-f833c7b3514f" (UID: "ecd68f58-3555-4f34-98a7-f833c7b3514f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.825312 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ecd68f58-3555-4f34-98a7-f833c7b3514f" (UID: "ecd68f58-3555-4f34-98a7-f833c7b3514f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.827588 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-inventory" (OuterVolumeSpecName: "inventory") pod "ecd68f58-3555-4f34-98a7-f833c7b3514f" (UID: "ecd68f58-3555-4f34-98a7-f833c7b3514f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.830058 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "ecd68f58-3555-4f34-98a7-f833c7b3514f" (UID: "ecd68f58-3555-4f34-98a7-f833c7b3514f"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.895183 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.895214 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.895224 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.895241 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecd68f58-3555-4f34-98a7-f833c7b3514f-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.895255 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj8ck\" (UniqueName: \"kubernetes.io/projected/ecd68f58-3555-4f34-98a7-f833c7b3514f-kube-api-access-dj8ck\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:23 crc kubenswrapper[4863]: I1205 08:45:23.895266 4863 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ecd68f58-3555-4f34-98a7-f833c7b3514f-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.214905 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-l64s5" event={"ID":"ecd68f58-3555-4f34-98a7-f833c7b3514f","Type":"ContainerDied","Data":"260b0f21e8aa84835e4b07469d89af8622614fb0c88503415cf3c20bd1904c54"} Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.214946 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="260b0f21e8aa84835e4b07469d89af8622614fb0c88503415cf3c20bd1904c54" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.214985 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-l64s5" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.299797 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-8r6vz"] Dec 05 08:45:24 crc kubenswrapper[4863]: E1205 08:45:24.300253 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd68f58-3555-4f34-98a7-f833c7b3514f" containerName="ovn-openstack-openstack-cell1" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300271 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd68f58-3555-4f34-98a7-f833c7b3514f" containerName="ovn-openstack-openstack-cell1" Dec 05 08:45:24 crc kubenswrapper[4863]: E1205 08:45:24.300288 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="extract-utilities" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300294 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="extract-utilities" Dec 05 08:45:24 crc kubenswrapper[4863]: E1205 08:45:24.300309 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="extract-content" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300317 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="extract-content" Dec 05 08:45:24 crc kubenswrapper[4863]: E1205 08:45:24.300330 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88c64e5a-7c28-49a0-9c42-6b2596062da9" containerName="collect-profiles" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300336 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="88c64e5a-7c28-49a0-9c42-6b2596062da9" containerName="collect-profiles" Dec 05 08:45:24 crc kubenswrapper[4863]: E1205 08:45:24.300359 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="registry-server" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300364 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="registry-server" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300590 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="88c64e5a-7c28-49a0-9c42-6b2596062da9" containerName="collect-profiles" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300606 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="01d9acb3-5c64-450f-879b-bdeec3d515f2" containerName="registry-server" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.300616 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecd68f58-3555-4f34-98a7-f833c7b3514f" containerName="ovn-openstack-openstack-cell1" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.301382 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.303699 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.304021 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.304839 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.304894 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.305038 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.305255 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.312020 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-8r6vz"] Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.402722 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4tfq\" (UniqueName: \"kubernetes.io/projected/48649f07-ecaf-4827-bd55-1b03c8d8a53b-kube-api-access-j4tfq\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.403012 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.403069 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.403144 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.403188 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-ovn-metadata-agent-neutron-config-0\") pod 
\"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.403249 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.403293 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.505035 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.505100 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.505123 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4tfq\" (UniqueName: \"kubernetes.io/projected/48649f07-ecaf-4827-bd55-1b03c8d8a53b-kube-api-access-j4tfq\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.505196 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.505232 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.505273 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: 
\"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.505300 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.510753 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.511294 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.511375 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.511946 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.512918 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.514072 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.523233 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4tfq\" (UniqueName: \"kubernetes.io/projected/48649f07-ecaf-4827-bd55-1b03c8d8a53b-kube-api-access-j4tfq\") pod \"neutron-metadata-openstack-openstack-cell1-8r6vz\" (UID: 
\"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:24 crc kubenswrapper[4863]: I1205 08:45:24.662230 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:45:25 crc kubenswrapper[4863]: I1205 08:45:25.200098 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-8r6vz"] Dec 05 08:45:25 crc kubenswrapper[4863]: I1205 08:45:25.227764 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" event={"ID":"48649f07-ecaf-4827-bd55-1b03c8d8a53b","Type":"ContainerStarted","Data":"05aa5904df9d91d8cf4c6c14eb61283bbe05d00ddd0d7c0a5021dc1559d4f8b8"} Dec 05 08:45:26 crc kubenswrapper[4863]: I1205 08:45:26.239443 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" event={"ID":"48649f07-ecaf-4827-bd55-1b03c8d8a53b","Type":"ContainerStarted","Data":"5c7769771ab7034737c7308c370692f1d401340ae83d2598b5b6cd8c99684b5c"} Dec 05 08:45:26 crc kubenswrapper[4863]: I1205 08:45:26.264964 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" podStartSLOduration=1.766951654 podStartE2EDuration="2.264940222s" podCreationTimestamp="2025-12-05 08:45:24 +0000 UTC" firstStartedPulling="2025-12-05 08:45:25.205596333 +0000 UTC m=+7152.931593373" lastFinishedPulling="2025-12-05 08:45:25.703584901 +0000 UTC m=+7153.429581941" observedRunningTime="2025-12-05 08:45:26.260326599 +0000 UTC m=+7153.986323689" watchObservedRunningTime="2025-12-05 08:45:26.264940222 +0000 UTC m=+7153.990937262" Dec 05 08:45:30 crc kubenswrapper[4863]: I1205 08:45:30.602351 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:45:30 crc kubenswrapper[4863]: E1205 08:45:30.603152 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:45:39 crc kubenswrapper[4863]: I1205 08:45:39.989510 4863 scope.go:117] "RemoveContainer" containerID="d2684deda275f29d345f6032a8d7be2cc2be4fd9716046b41d7d0aa731fc32d4" Dec 05 08:45:41 crc kubenswrapper[4863]: I1205 08:45:41.602723 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:45:41 crc kubenswrapper[4863]: E1205 08:45:41.603309 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:45:52 crc kubenswrapper[4863]: I1205 08:45:52.610698 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:45:52 crc 
kubenswrapper[4863]: E1205 08:45:52.611705 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:46:04 crc kubenswrapper[4863]: I1205 08:46:04.602656 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:46:04 crc kubenswrapper[4863]: E1205 08:46:04.603551 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:46:15 crc kubenswrapper[4863]: I1205 08:46:15.601724 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:46:15 crc kubenswrapper[4863]: E1205 08:46:15.602390 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:46:19 crc kubenswrapper[4863]: I1205 08:46:19.345253 4863 generic.go:334] "Generic (PLEG): container finished" podID="48649f07-ecaf-4827-bd55-1b03c8d8a53b" containerID="5c7769771ab7034737c7308c370692f1d401340ae83d2598b5b6cd8c99684b5c" exitCode=0 Dec 05 08:46:19 crc kubenswrapper[4863]: I1205 08:46:19.345337 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" event={"ID":"48649f07-ecaf-4827-bd55-1b03c8d8a53b","Type":"ContainerDied","Data":"5c7769771ab7034737c7308c370692f1d401340ae83d2598b5b6cd8c99684b5c"} Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.796136 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.891405 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-metadata-combined-ca-bundle\") pod \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.891567 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4tfq\" (UniqueName: \"kubernetes.io/projected/48649f07-ecaf-4827-bd55-1b03c8d8a53b-kube-api-access-j4tfq\") pod \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.891692 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ssh-key\") pod \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.891750 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-inventory\") pod \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.891790 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-nova-metadata-neutron-config-0\") pod \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.891806 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ceph\") pod \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.891824 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\" (UID: \"48649f07-ecaf-4827-bd55-1b03c8d8a53b\") " Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.898286 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48649f07-ecaf-4827-bd55-1b03c8d8a53b-kube-api-access-j4tfq" (OuterVolumeSpecName: "kube-api-access-j4tfq") pod "48649f07-ecaf-4827-bd55-1b03c8d8a53b" (UID: "48649f07-ecaf-4827-bd55-1b03c8d8a53b"). InnerVolumeSpecName "kube-api-access-j4tfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.901007 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ceph" (OuterVolumeSpecName: "ceph") pod "48649f07-ecaf-4827-bd55-1b03c8d8a53b" (UID: "48649f07-ecaf-4827-bd55-1b03c8d8a53b"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.901411 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "48649f07-ecaf-4827-bd55-1b03c8d8a53b" (UID: "48649f07-ecaf-4827-bd55-1b03c8d8a53b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.922248 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "48649f07-ecaf-4827-bd55-1b03c8d8a53b" (UID: "48649f07-ecaf-4827-bd55-1b03c8d8a53b"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.930803 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "48649f07-ecaf-4827-bd55-1b03c8d8a53b" (UID: "48649f07-ecaf-4827-bd55-1b03c8d8a53b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.931353 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "48649f07-ecaf-4827-bd55-1b03c8d8a53b" (UID: "48649f07-ecaf-4827-bd55-1b03c8d8a53b"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.934774 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-inventory" (OuterVolumeSpecName: "inventory") pod "48649f07-ecaf-4827-bd55-1b03c8d8a53b" (UID: "48649f07-ecaf-4827-bd55-1b03c8d8a53b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.994227 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.994262 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.994273 4863 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.994285 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.994294 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.994304 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48649f07-ecaf-4827-bd55-1b03c8d8a53b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:20 crc kubenswrapper[4863]: I1205 08:46:20.994313 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4tfq\" (UniqueName: \"kubernetes.io/projected/48649f07-ecaf-4827-bd55-1b03c8d8a53b-kube-api-access-j4tfq\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.372971 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" event={"ID":"48649f07-ecaf-4827-bd55-1b03c8d8a53b","Type":"ContainerDied","Data":"05aa5904df9d91d8cf4c6c14eb61283bbe05d00ddd0d7c0a5021dc1559d4f8b8"} Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.373012 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-8r6vz" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.373037 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05aa5904df9d91d8cf4c6c14eb61283bbe05d00ddd0d7c0a5021dc1559d4f8b8" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.477097 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-wwqmh"] Dec 05 08:46:21 crc kubenswrapper[4863]: E1205 08:46:21.477599 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48649f07-ecaf-4827-bd55-1b03c8d8a53b" containerName="neutron-metadata-openstack-openstack-cell1" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.477622 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="48649f07-ecaf-4827-bd55-1b03c8d8a53b" containerName="neutron-metadata-openstack-openstack-cell1" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.477864 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="48649f07-ecaf-4827-bd55-1b03c8d8a53b" containerName="neutron-metadata-openstack-openstack-cell1" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.478696 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.485738 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.485780 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.485757 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.485947 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.486020 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.500020 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-wwqmh"] Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.610650 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.610691 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ssh-key\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.610727 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gzk5\" (UniqueName: \"kubernetes.io/projected/c16402f6-3e26-4148-9d4d-cce6fae37061-kube-api-access-9gzk5\") pod 
\"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.610798 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-inventory\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.610850 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.610896 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ceph\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.712907 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gzk5\" (UniqueName: \"kubernetes.io/projected/c16402f6-3e26-4148-9d4d-cce6fae37061-kube-api-access-9gzk5\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.713251 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-inventory\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.713326 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.713888 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ceph\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.714948 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.715010 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ssh-key\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.717072 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.717629 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-inventory\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.718336 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ceph\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.719210 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.728046 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ssh-key\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.729721 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gzk5\" (UniqueName: \"kubernetes.io/projected/c16402f6-3e26-4148-9d4d-cce6fae37061-kube-api-access-9gzk5\") pod \"libvirt-openstack-openstack-cell1-wwqmh\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:21 crc kubenswrapper[4863]: I1205 08:46:21.799685 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:46:22 crc kubenswrapper[4863]: I1205 08:46:22.332981 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-wwqmh"] Dec 05 08:46:22 crc kubenswrapper[4863]: I1205 08:46:22.382948 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" event={"ID":"c16402f6-3e26-4148-9d4d-cce6fae37061","Type":"ContainerStarted","Data":"f24b080ef375e88fbf83282e904c5fc63326fb76a14072fdd916f3aae4b1ec8f"} Dec 05 08:46:23 crc kubenswrapper[4863]: I1205 08:46:23.397048 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" event={"ID":"c16402f6-3e26-4148-9d4d-cce6fae37061","Type":"ContainerStarted","Data":"aecb6bf9835940c81b666051db5bfe2e524155ad6dda0052afdc014464fd9f38"} Dec 05 08:46:23 crc kubenswrapper[4863]: I1205 08:46:23.418901 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" podStartSLOduration=2.001750932 podStartE2EDuration="2.418881521s" podCreationTimestamp="2025-12-05 08:46:21 +0000 UTC" firstStartedPulling="2025-12-05 08:46:22.337617368 +0000 UTC m=+7210.063614408" lastFinishedPulling="2025-12-05 08:46:22.754747957 +0000 UTC m=+7210.480744997" observedRunningTime="2025-12-05 08:46:23.416876442 +0000 UTC m=+7211.142873482" watchObservedRunningTime="2025-12-05 08:46:23.418881521 +0000 UTC m=+7211.144878571" Dec 05 08:46:29 crc kubenswrapper[4863]: I1205 08:46:29.602149 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:46:29 crc kubenswrapper[4863]: E1205 08:46:29.603029 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:46:42 crc kubenswrapper[4863]: I1205 08:46:42.608937 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:46:42 crc kubenswrapper[4863]: E1205 08:46:42.609920 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:46:54 crc kubenswrapper[4863]: I1205 08:46:54.603992 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:46:54 crc kubenswrapper[4863]: E1205 08:46:54.605263 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:47:09 crc kubenswrapper[4863]: I1205 08:47:09.602543 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:47:09 crc kubenswrapper[4863]: E1205 08:47:09.603508 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:47:24 crc kubenswrapper[4863]: I1205 08:47:24.601768 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:47:24 crc kubenswrapper[4863]: E1205 08:47:24.602741 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:47:38 crc kubenswrapper[4863]: I1205 08:47:38.603016 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:47:38 crc kubenswrapper[4863]: E1205 08:47:38.606582 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:47:49 crc kubenswrapper[4863]: I1205 08:47:49.603266 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:47:49 crc kubenswrapper[4863]: E1205 08:47:49.604043 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:48:00 crc kubenswrapper[4863]: I1205 08:48:00.602061 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:48:00 crc kubenswrapper[4863]: E1205 08:48:00.602852 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:48:14 crc kubenswrapper[4863]: I1205 08:48:14.601868 4863 scope.go:117] "RemoveContainer" 
containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:48:14 crc kubenswrapper[4863]: E1205 08:48:14.604166 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:48:25 crc kubenswrapper[4863]: I1205 08:48:25.601415 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:48:25 crc kubenswrapper[4863]: E1205 08:48:25.602179 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:48:40 crc kubenswrapper[4863]: I1205 08:48:40.602373 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:48:40 crc kubenswrapper[4863]: E1205 08:48:40.603296 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:48:52 crc kubenswrapper[4863]: I1205 08:48:52.608323 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:48:52 crc kubenswrapper[4863]: E1205 08:48:52.609109 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:49:04 crc kubenswrapper[4863]: I1205 08:49:04.602552 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:49:04 crc kubenswrapper[4863]: E1205 08:49:04.603415 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:49:16 crc kubenswrapper[4863]: I1205 08:49:16.602302 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:49:16 crc kubenswrapper[4863]: E1205 08:49:16.604267 4863 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:49:30 crc kubenswrapper[4863]: I1205 08:49:30.602060 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:49:30 crc kubenswrapper[4863]: E1205 08:49:30.602846 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:49:41 crc kubenswrapper[4863]: I1205 08:49:41.602487 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:49:42 crc kubenswrapper[4863]: I1205 08:49:42.256936 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"9b035f8ed4540bd577808ba1e60d5273623fa73824237e3b4c3f38e08278faf4"} Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.366146 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4st5m"] Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.370912 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.383792 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4st5m"] Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.552533 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwfch\" (UniqueName: \"kubernetes.io/projected/eea85e8e-3b49-4871-989e-b0999552fa84-kube-api-access-dwfch\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.552841 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-utilities\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.552977 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-catalog-content\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.655659 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwfch\" (UniqueName: \"kubernetes.io/projected/eea85e8e-3b49-4871-989e-b0999552fa84-kube-api-access-dwfch\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.655717 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-utilities\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.655766 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-catalog-content\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.657017 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-catalog-content\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.657535 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-utilities\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.692776 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dwfch\" (UniqueName: \"kubernetes.io/projected/eea85e8e-3b49-4871-989e-b0999552fa84-kube-api-access-dwfch\") pod \"redhat-marketplace-4st5m\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:15 crc kubenswrapper[4863]: I1205 08:50:15.720361 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:16 crc kubenswrapper[4863]: I1205 08:50:16.245514 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4st5m"] Dec 05 08:50:16 crc kubenswrapper[4863]: I1205 08:50:16.609845 4863 generic.go:334] "Generic (PLEG): container finished" podID="eea85e8e-3b49-4871-989e-b0999552fa84" containerID="e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924" exitCode=0 Dec 05 08:50:16 crc kubenswrapper[4863]: I1205 08:50:16.612897 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:50:16 crc kubenswrapper[4863]: I1205 08:50:16.632637 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4st5m" event={"ID":"eea85e8e-3b49-4871-989e-b0999552fa84","Type":"ContainerDied","Data":"e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924"} Dec 05 08:50:16 crc kubenswrapper[4863]: I1205 08:50:16.632678 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4st5m" event={"ID":"eea85e8e-3b49-4871-989e-b0999552fa84","Type":"ContainerStarted","Data":"26e91b44d91da4d83529e5025dc8ed156ef434f2ad29316edaa7606ed9ea1f77"} Dec 05 08:50:17 crc kubenswrapper[4863]: I1205 08:50:17.620184 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4st5m" event={"ID":"eea85e8e-3b49-4871-989e-b0999552fa84","Type":"ContainerStarted","Data":"594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4"} Dec 05 08:50:18 crc kubenswrapper[4863]: I1205 08:50:18.633965 4863 generic.go:334] "Generic (PLEG): container finished" podID="eea85e8e-3b49-4871-989e-b0999552fa84" containerID="594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4" exitCode=0 Dec 05 08:50:18 crc kubenswrapper[4863]: I1205 08:50:18.634026 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4st5m" event={"ID":"eea85e8e-3b49-4871-989e-b0999552fa84","Type":"ContainerDied","Data":"594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4"} Dec 05 08:50:19 crc kubenswrapper[4863]: I1205 08:50:19.643856 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4st5m" event={"ID":"eea85e8e-3b49-4871-989e-b0999552fa84","Type":"ContainerStarted","Data":"fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64"} Dec 05 08:50:19 crc kubenswrapper[4863]: I1205 08:50:19.671209 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4st5m" podStartSLOduration=2.2407264590000002 podStartE2EDuration="4.671186901s" podCreationTimestamp="2025-12-05 08:50:15 +0000 UTC" firstStartedPulling="2025-12-05 08:50:16.612631132 +0000 UTC m=+7444.338628172" lastFinishedPulling="2025-12-05 08:50:19.043091554 +0000 UTC m=+7446.769088614" observedRunningTime="2025-12-05 08:50:19.658146453 +0000 UTC m=+7447.384143513" watchObservedRunningTime="2025-12-05 08:50:19.671186901 +0000 UTC 
m=+7447.397183941" Dec 05 08:50:25 crc kubenswrapper[4863]: I1205 08:50:25.721195 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:25 crc kubenswrapper[4863]: I1205 08:50:25.723307 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:25 crc kubenswrapper[4863]: I1205 08:50:25.770205 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:26 crc kubenswrapper[4863]: I1205 08:50:26.752710 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:26 crc kubenswrapper[4863]: I1205 08:50:26.802393 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4st5m"] Dec 05 08:50:28 crc kubenswrapper[4863]: I1205 08:50:28.733963 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4st5m" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="registry-server" containerID="cri-o://fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64" gracePeriod=2 Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.244934 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.348437 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-catalog-content\") pod \"eea85e8e-3b49-4871-989e-b0999552fa84\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.348568 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwfch\" (UniqueName: \"kubernetes.io/projected/eea85e8e-3b49-4871-989e-b0999552fa84-kube-api-access-dwfch\") pod \"eea85e8e-3b49-4871-989e-b0999552fa84\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.348836 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-utilities\") pod \"eea85e8e-3b49-4871-989e-b0999552fa84\" (UID: \"eea85e8e-3b49-4871-989e-b0999552fa84\") " Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.350123 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-utilities" (OuterVolumeSpecName: "utilities") pod "eea85e8e-3b49-4871-989e-b0999552fa84" (UID: "eea85e8e-3b49-4871-989e-b0999552fa84"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.351027 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.357431 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea85e8e-3b49-4871-989e-b0999552fa84-kube-api-access-dwfch" (OuterVolumeSpecName: "kube-api-access-dwfch") pod "eea85e8e-3b49-4871-989e-b0999552fa84" (UID: "eea85e8e-3b49-4871-989e-b0999552fa84"). InnerVolumeSpecName "kube-api-access-dwfch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.369817 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eea85e8e-3b49-4871-989e-b0999552fa84" (UID: "eea85e8e-3b49-4871-989e-b0999552fa84"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.453563 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea85e8e-3b49-4871-989e-b0999552fa84-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.453613 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwfch\" (UniqueName: \"kubernetes.io/projected/eea85e8e-3b49-4871-989e-b0999552fa84-kube-api-access-dwfch\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.747964 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4st5m" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.747988 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4st5m" event={"ID":"eea85e8e-3b49-4871-989e-b0999552fa84","Type":"ContainerDied","Data":"fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64"} Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.749490 4863 generic.go:334] "Generic (PLEG): container finished" podID="eea85e8e-3b49-4871-989e-b0999552fa84" containerID="fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64" exitCode=0 Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.749911 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4st5m" event={"ID":"eea85e8e-3b49-4871-989e-b0999552fa84","Type":"ContainerDied","Data":"26e91b44d91da4d83529e5025dc8ed156ef434f2ad29316edaa7606ed9ea1f77"} Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.749871 4863 scope.go:117] "RemoveContainer" containerID="fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.780813 4863 scope.go:117] "RemoveContainer" containerID="594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.796185 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4st5m"] Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.805718 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4st5m"] Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.810134 4863 scope.go:117] "RemoveContainer" containerID="e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.863884 4863 scope.go:117] "RemoveContainer" containerID="fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64" Dec 05 08:50:29 crc kubenswrapper[4863]: E1205 08:50:29.864704 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64\": container with ID starting with fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64 not found: ID does not exist" containerID="fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.864764 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64"} err="failed to get container status \"fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64\": rpc error: code = NotFound desc = could not find container \"fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64\": container with ID starting with fec9c51719efd0aba1d33e12394b13512468581e41af277b6ddaffcd8fa6fb64 not found: ID does not exist" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.864806 4863 scope.go:117] "RemoveContainer" containerID="594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4" Dec 05 08:50:29 crc kubenswrapper[4863]: E1205 08:50:29.865326 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4\": container with ID 
starting with 594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4 not found: ID does not exist" containerID="594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.865393 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4"} err="failed to get container status \"594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4\": rpc error: code = NotFound desc = could not find container \"594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4\": container with ID starting with 594ed591d58d26b834dd9225f66f3206e526447faeaf93a8b01e8db777b8f7f4 not found: ID does not exist" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.865452 4863 scope.go:117] "RemoveContainer" containerID="e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924" Dec 05 08:50:29 crc kubenswrapper[4863]: E1205 08:50:29.865880 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924\": container with ID starting with e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924 not found: ID does not exist" containerID="e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924" Dec 05 08:50:29 crc kubenswrapper[4863]: I1205 08:50:29.865968 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924"} err="failed to get container status \"e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924\": rpc error: code = NotFound desc = could not find container \"e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924\": container with ID starting with e0e9e09915b27358065a5072e6619489f78bed53246974ec35dcb2ca0b3b3924 not found: ID does not exist" Dec 05 08:50:30 crc kubenswrapper[4863]: I1205 08:50:30.627469 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" path="/var/lib/kubelet/pods/eea85e8e-3b49-4871-989e-b0999552fa84/volumes" Dec 05 08:50:54 crc kubenswrapper[4863]: I1205 08:50:54.001533 4863 generic.go:334] "Generic (PLEG): container finished" podID="c16402f6-3e26-4148-9d4d-cce6fae37061" containerID="aecb6bf9835940c81b666051db5bfe2e524155ad6dda0052afdc014464fd9f38" exitCode=0 Dec 05 08:50:54 crc kubenswrapper[4863]: I1205 08:50:54.001623 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" event={"ID":"c16402f6-3e26-4148-9d4d-cce6fae37061","Type":"ContainerDied","Data":"aecb6bf9835940c81b666051db5bfe2e524155ad6dda0052afdc014464fd9f38"} Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.505626 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.598291 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ceph\") pod \"c16402f6-3e26-4148-9d4d-cce6fae37061\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.598408 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-secret-0\") pod \"c16402f6-3e26-4148-9d4d-cce6fae37061\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.598451 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-inventory\") pod \"c16402f6-3e26-4148-9d4d-cce6fae37061\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.598506 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ssh-key\") pod \"c16402f6-3e26-4148-9d4d-cce6fae37061\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.598527 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-combined-ca-bundle\") pod \"c16402f6-3e26-4148-9d4d-cce6fae37061\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.598639 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gzk5\" (UniqueName: \"kubernetes.io/projected/c16402f6-3e26-4148-9d4d-cce6fae37061-kube-api-access-9gzk5\") pod \"c16402f6-3e26-4148-9d4d-cce6fae37061\" (UID: \"c16402f6-3e26-4148-9d4d-cce6fae37061\") " Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.604654 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c16402f6-3e26-4148-9d4d-cce6fae37061" (UID: "c16402f6-3e26-4148-9d4d-cce6fae37061"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.604671 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ceph" (OuterVolumeSpecName: "ceph") pod "c16402f6-3e26-4148-9d4d-cce6fae37061" (UID: "c16402f6-3e26-4148-9d4d-cce6fae37061"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.604740 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c16402f6-3e26-4148-9d4d-cce6fae37061-kube-api-access-9gzk5" (OuterVolumeSpecName: "kube-api-access-9gzk5") pod "c16402f6-3e26-4148-9d4d-cce6fae37061" (UID: "c16402f6-3e26-4148-9d4d-cce6fae37061"). InnerVolumeSpecName "kube-api-access-9gzk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.627201 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-inventory" (OuterVolumeSpecName: "inventory") pod "c16402f6-3e26-4148-9d4d-cce6fae37061" (UID: "c16402f6-3e26-4148-9d4d-cce6fae37061"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.630737 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c16402f6-3e26-4148-9d4d-cce6fae37061" (UID: "c16402f6-3e26-4148-9d4d-cce6fae37061"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.634069 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "c16402f6-3e26-4148-9d4d-cce6fae37061" (UID: "c16402f6-3e26-4148-9d4d-cce6fae37061"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.700744 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.700785 4863 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.700799 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.700811 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.700823 4863 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16402f6-3e26-4148-9d4d-cce6fae37061-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:55 crc kubenswrapper[4863]: I1205 08:50:55.700835 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gzk5\" (UniqueName: \"kubernetes.io/projected/c16402f6-3e26-4148-9d4d-cce6fae37061-kube-api-access-9gzk5\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.020313 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" event={"ID":"c16402f6-3e26-4148-9d4d-cce6fae37061","Type":"ContainerDied","Data":"f24b080ef375e88fbf83282e904c5fc63326fb76a14072fdd916f3aae4b1ec8f"} Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.020646 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f24b080ef375e88fbf83282e904c5fc63326fb76a14072fdd916f3aae4b1ec8f" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.020389 4863 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-wwqmh" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.133334 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vjm8z"] Dec 05 08:50:56 crc kubenswrapper[4863]: E1205 08:50:56.133839 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c16402f6-3e26-4148-9d4d-cce6fae37061" containerName="libvirt-openstack-openstack-cell1" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.133855 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c16402f6-3e26-4148-9d4d-cce6fae37061" containerName="libvirt-openstack-openstack-cell1" Dec 05 08:50:56 crc kubenswrapper[4863]: E1205 08:50:56.133884 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="registry-server" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.133891 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="registry-server" Dec 05 08:50:56 crc kubenswrapper[4863]: E1205 08:50:56.133906 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="extract-utilities" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.133913 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="extract-utilities" Dec 05 08:50:56 crc kubenswrapper[4863]: E1205 08:50:56.133926 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="extract-content" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.133933 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="extract-content" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.134118 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c16402f6-3e26-4148-9d4d-cce6fae37061" containerName="libvirt-openstack-openstack-cell1" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.134137 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea85e8e-3b49-4871-989e-b0999552fa84" containerName="registry-server" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.134879 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.137856 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.139025 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.139076 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.139093 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.139040 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.139233 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.139290 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.148704 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vjm8z"] Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211338 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211408 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211443 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211515 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211578 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ceph\") pod 
\"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211612 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211654 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211705 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211790 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chzz2\" (UniqueName: \"kubernetes.io/projected/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-kube-api-access-chzz2\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211870 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.211910 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.315806 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.315861 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-1\") pod 
\"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.315900 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.315921 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.315940 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.315964 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.316007 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.316027 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.316055 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.316089 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " 
pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.316143 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chzz2\" (UniqueName: \"kubernetes.io/projected/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-kube-api-access-chzz2\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.325318 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.326014 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.326036 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.326262 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.328042 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.328436 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.328519 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.329136 4863 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.332137 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.332437 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.333124 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chzz2\" (UniqueName: \"kubernetes.io/projected/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-kube-api-access-chzz2\") pod \"nova-cell1-openstack-openstack-cell1-vjm8z\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:56 crc kubenswrapper[4863]: I1205 08:50:56.463333 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:50:57 crc kubenswrapper[4863]: I1205 08:50:57.037163 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vjm8z"] Dec 05 08:50:58 crc kubenswrapper[4863]: I1205 08:50:58.038983 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" event={"ID":"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36","Type":"ContainerStarted","Data":"796cb61a256116e72468175f4c05413f88d85a050f9af99555625a974899b5e0"} Dec 05 08:50:58 crc kubenswrapper[4863]: I1205 08:50:58.039490 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" event={"ID":"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36","Type":"ContainerStarted","Data":"40ee3b2423efa5859d3888f90b08732a252785b65efef1cbe140101cd39a8803"} Dec 05 08:50:58 crc kubenswrapper[4863]: I1205 08:50:58.073398 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" podStartSLOduration=1.672729957 podStartE2EDuration="2.073348444s" podCreationTimestamp="2025-12-05 08:50:56 +0000 UTC" firstStartedPulling="2025-12-05 08:50:57.037170898 +0000 UTC m=+7484.763167938" lastFinishedPulling="2025-12-05 08:50:57.437789385 +0000 UTC m=+7485.163786425" observedRunningTime="2025-12-05 08:50:58.063199527 +0000 UTC m=+7485.789196607" watchObservedRunningTime="2025-12-05 08:50:58.073348444 +0000 UTC m=+7485.799345494" Dec 05 08:52:08 crc kubenswrapper[4863]: I1205 08:52:08.463988 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Dec 05 08:52:08 crc kubenswrapper[4863]: I1205 08:52:08.464739 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.197381 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n7z5j"] Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.200584 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.218149 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n7z5j"] Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.338822 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-catalog-content\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.338951 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-utilities\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.339101 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swm6f\" (UniqueName: \"kubernetes.io/projected/1d172e00-2947-4293-96a5-8246d6cd39c7-kube-api-access-swm6f\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.440664 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swm6f\" (UniqueName: \"kubernetes.io/projected/1d172e00-2947-4293-96a5-8246d6cd39c7-kube-api-access-swm6f\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.440734 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-catalog-content\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.440818 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-utilities\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.441339 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-utilities\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.441387 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-catalog-content\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.460461 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swm6f\" (UniqueName: \"kubernetes.io/projected/1d172e00-2947-4293-96a5-8246d6cd39c7-kube-api-access-swm6f\") pod \"redhat-operators-n7z5j\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:20 crc kubenswrapper[4863]: I1205 08:52:20.522970 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:21 crc kubenswrapper[4863]: I1205 08:52:21.007798 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n7z5j"] Dec 05 08:52:21 crc kubenswrapper[4863]: I1205 08:52:21.897591 4863 generic.go:334] "Generic (PLEG): container finished" podID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerID="afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5" exitCode=0 Dec 05 08:52:21 crc kubenswrapper[4863]: I1205 08:52:21.897651 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7z5j" event={"ID":"1d172e00-2947-4293-96a5-8246d6cd39c7","Type":"ContainerDied","Data":"afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5"} Dec 05 08:52:21 crc kubenswrapper[4863]: I1205 08:52:21.897927 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7z5j" event={"ID":"1d172e00-2947-4293-96a5-8246d6cd39c7","Type":"ContainerStarted","Data":"bda4a4d310e130bc2d19f4ec1e0d99804a65f63c6f6749102851cdf867df5964"} Dec 05 08:52:23 crc kubenswrapper[4863]: I1205 08:52:23.948863 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7z5j" event={"ID":"1d172e00-2947-4293-96a5-8246d6cd39c7","Type":"ContainerStarted","Data":"ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8"} Dec 05 08:52:25 crc kubenswrapper[4863]: I1205 08:52:25.968892 4863 generic.go:334] "Generic (PLEG): container finished" podID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerID="ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8" exitCode=0 Dec 05 08:52:25 crc kubenswrapper[4863]: I1205 08:52:25.968990 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7z5j" event={"ID":"1d172e00-2947-4293-96a5-8246d6cd39c7","Type":"ContainerDied","Data":"ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8"} Dec 05 08:52:26 crc kubenswrapper[4863]: I1205 08:52:26.982039 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7z5j" event={"ID":"1d172e00-2947-4293-96a5-8246d6cd39c7","Type":"ContainerStarted","Data":"861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837"} Dec 05 08:52:27 crc kubenswrapper[4863]: I1205 08:52:27.009569 4863 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n7z5j" podStartSLOduration=2.570125593 podStartE2EDuration="7.009547462s" podCreationTimestamp="2025-12-05 08:52:20 +0000 UTC" firstStartedPulling="2025-12-05 08:52:21.8998808 +0000 UTC m=+7569.625877840" lastFinishedPulling="2025-12-05 08:52:26.339302669 +0000 UTC m=+7574.065299709" observedRunningTime="2025-12-05 08:52:27.000267726 +0000 UTC m=+7574.726264776" watchObservedRunningTime="2025-12-05 08:52:27.009547462 +0000 UTC m=+7574.735544502" Dec 05 08:52:30 crc kubenswrapper[4863]: I1205 08:52:30.523221 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:30 crc kubenswrapper[4863]: I1205 08:52:30.523615 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:31 crc kubenswrapper[4863]: I1205 08:52:31.588169 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n7z5j" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="registry-server" probeResult="failure" output=< Dec 05 08:52:31 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 08:52:31 crc kubenswrapper[4863]: > Dec 05 08:52:38 crc kubenswrapper[4863]: I1205 08:52:38.464001 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:52:38 crc kubenswrapper[4863]: I1205 08:52:38.464651 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:52:40 crc kubenswrapper[4863]: I1205 08:52:40.570332 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:40 crc kubenswrapper[4863]: I1205 08:52:40.619925 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:40 crc kubenswrapper[4863]: I1205 08:52:40.808103 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n7z5j"] Dec 05 08:52:42 crc kubenswrapper[4863]: I1205 08:52:42.131829 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n7z5j" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="registry-server" containerID="cri-o://861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837" gracePeriod=2 Dec 05 08:52:42 crc kubenswrapper[4863]: I1205 08:52:42.801171 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:42 crc kubenswrapper[4863]: I1205 08:52:42.927940 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-catalog-content\") pod \"1d172e00-2947-4293-96a5-8246d6cd39c7\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " Dec 05 08:52:42 crc kubenswrapper[4863]: I1205 08:52:42.928262 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-utilities\") pod \"1d172e00-2947-4293-96a5-8246d6cd39c7\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " Dec 05 08:52:42 crc kubenswrapper[4863]: I1205 08:52:42.928313 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swm6f\" (UniqueName: \"kubernetes.io/projected/1d172e00-2947-4293-96a5-8246d6cd39c7-kube-api-access-swm6f\") pod \"1d172e00-2947-4293-96a5-8246d6cd39c7\" (UID: \"1d172e00-2947-4293-96a5-8246d6cd39c7\") " Dec 05 08:52:42 crc kubenswrapper[4863]: I1205 08:52:42.929102 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-utilities" (OuterVolumeSpecName: "utilities") pod "1d172e00-2947-4293-96a5-8246d6cd39c7" (UID: "1d172e00-2947-4293-96a5-8246d6cd39c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:52:42 crc kubenswrapper[4863]: I1205 08:52:42.936367 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d172e00-2947-4293-96a5-8246d6cd39c7-kube-api-access-swm6f" (OuterVolumeSpecName: "kube-api-access-swm6f") pod "1d172e00-2947-4293-96a5-8246d6cd39c7" (UID: "1d172e00-2947-4293-96a5-8246d6cd39c7"). InnerVolumeSpecName "kube-api-access-swm6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.031106 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.031146 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swm6f\" (UniqueName: \"kubernetes.io/projected/1d172e00-2947-4293-96a5-8246d6cd39c7-kube-api-access-swm6f\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.048366 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d172e00-2947-4293-96a5-8246d6cd39c7" (UID: "1d172e00-2947-4293-96a5-8246d6cd39c7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.132699 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d172e00-2947-4293-96a5-8246d6cd39c7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.142414 4863 generic.go:334] "Generic (PLEG): container finished" podID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerID="861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837" exitCode=0 Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.142547 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7z5j" event={"ID":"1d172e00-2947-4293-96a5-8246d6cd39c7","Type":"ContainerDied","Data":"861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837"} Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.142575 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7z5j" event={"ID":"1d172e00-2947-4293-96a5-8246d6cd39c7","Type":"ContainerDied","Data":"bda4a4d310e130bc2d19f4ec1e0d99804a65f63c6f6749102851cdf867df5964"} Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.142591 4863 scope.go:117] "RemoveContainer" containerID="861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.142715 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n7z5j" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.165362 4863 scope.go:117] "RemoveContainer" containerID="ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.174393 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n7z5j"] Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.183755 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n7z5j"] Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.217734 4863 scope.go:117] "RemoveContainer" containerID="afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.243302 4863 scope.go:117] "RemoveContainer" containerID="861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837" Dec 05 08:52:43 crc kubenswrapper[4863]: E1205 08:52:43.243936 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837\": container with ID starting with 861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837 not found: ID does not exist" containerID="861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.243979 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837"} err="failed to get container status \"861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837\": rpc error: code = NotFound desc = could not find container \"861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837\": container with ID starting with 861b8e110998ad27ac6dffdac67a8d6652edfcb590ee241cfdeddbe7996c9837 not found: ID does not exist" Dec 05 08:52:43 crc 
kubenswrapper[4863]: I1205 08:52:43.244007 4863 scope.go:117] "RemoveContainer" containerID="ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8" Dec 05 08:52:43 crc kubenswrapper[4863]: E1205 08:52:43.244530 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8\": container with ID starting with ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8 not found: ID does not exist" containerID="ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.244568 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8"} err="failed to get container status \"ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8\": rpc error: code = NotFound desc = could not find container \"ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8\": container with ID starting with ae7de11041b41f19ce01aa881d10b2f6a92c8dcc931860eebc74a8810addb5c8 not found: ID does not exist" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.244590 4863 scope.go:117] "RemoveContainer" containerID="afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5" Dec 05 08:52:43 crc kubenswrapper[4863]: E1205 08:52:43.244986 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5\": container with ID starting with afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5 not found: ID does not exist" containerID="afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5" Dec 05 08:52:43 crc kubenswrapper[4863]: I1205 08:52:43.245039 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5"} err="failed to get container status \"afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5\": rpc error: code = NotFound desc = could not find container \"afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5\": container with ID starting with afea71b9ef3f685d8b1c2afb7c78b1edf9aa4c8d618a76832269803fcbb641d5 not found: ID does not exist" Dec 05 08:52:44 crc kubenswrapper[4863]: I1205 08:52:44.620325 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" path="/var/lib/kubelet/pods/1d172e00-2947-4293-96a5-8246d6cd39c7/volumes" Dec 05 08:53:08 crc kubenswrapper[4863]: I1205 08:53:08.464066 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:53:08 crc kubenswrapper[4863]: I1205 08:53:08.464721 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:53:08 crc kubenswrapper[4863]: I1205 08:53:08.464779 4863 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:53:08 crc kubenswrapper[4863]: I1205 08:53:08.465667 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9b035f8ed4540bd577808ba1e60d5273623fa73824237e3b4c3f38e08278faf4"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:53:08 crc kubenswrapper[4863]: I1205 08:53:08.465770 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://9b035f8ed4540bd577808ba1e60d5273623fa73824237e3b4c3f38e08278faf4" gracePeriod=600 Dec 05 08:53:09 crc kubenswrapper[4863]: I1205 08:53:09.429919 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="9b035f8ed4540bd577808ba1e60d5273623fa73824237e3b4c3f38e08278faf4" exitCode=0 Dec 05 08:53:09 crc kubenswrapper[4863]: I1205 08:53:09.429996 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"9b035f8ed4540bd577808ba1e60d5273623fa73824237e3b4c3f38e08278faf4"} Dec 05 08:53:09 crc kubenswrapper[4863]: I1205 08:53:09.430419 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb"} Dec 05 08:53:09 crc kubenswrapper[4863]: I1205 08:53:09.430436 4863 scope.go:117] "RemoveContainer" containerID="fbac4cbfb7c4f350c2471eac20938312076641498de25ca4275d188e1e823296" Dec 05 08:54:04 crc kubenswrapper[4863]: I1205 08:54:04.974449 4863 generic.go:334] "Generic (PLEG): container finished" podID="d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" containerID="796cb61a256116e72468175f4c05413f88d85a050f9af99555625a974899b5e0" exitCode=0 Dec 05 08:54:04 crc kubenswrapper[4863]: I1205 08:54:04.974668 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" event={"ID":"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36","Type":"ContainerDied","Data":"796cb61a256116e72468175f4c05413f88d85a050f9af99555625a974899b5e0"} Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.547459 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.665204 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chzz2\" (UniqueName: \"kubernetes.io/projected/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-kube-api-access-chzz2\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.665862 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-0\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.666034 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-combined-ca-bundle\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.666208 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ssh-key\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.666312 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-1\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.666425 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-inventory\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.666619 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-1\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.666743 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ceph\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.666845 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-0\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.667064 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-1\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.667156 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-0\") pod \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\" (UID: \"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36\") " Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.678114 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-kube-api-access-chzz2" (OuterVolumeSpecName: "kube-api-access-chzz2") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "kube-api-access-chzz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.683089 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ceph" (OuterVolumeSpecName: "ceph") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.684987 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.704929 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.712950 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.719301 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-inventory" (OuterVolumeSpecName: "inventory") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.727734 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.728197 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.729953 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.730967 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.742322 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" (UID: "d7ea9e9b-e21d-4a97-a1ff-e5be88487c36"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770190 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770218 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770232 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chzz2\" (UniqueName: \"kubernetes.io/projected/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-kube-api-access-chzz2\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770245 4863 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770258 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770270 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770283 4863 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770294 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770306 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770317 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.770329 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d7ea9e9b-e21d-4a97-a1ff-e5be88487c36-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.995090 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" event={"ID":"d7ea9e9b-e21d-4a97-a1ff-e5be88487c36","Type":"ContainerDied","Data":"40ee3b2423efa5859d3888f90b08732a252785b65efef1cbe140101cd39a8803"} Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.995132 4863 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="40ee3b2423efa5859d3888f90b08732a252785b65efef1cbe140101cd39a8803" Dec 05 08:54:06 crc kubenswrapper[4863]: I1205 08:54:06.995187 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vjm8z" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.135386 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-4sf42"] Dec 05 08:54:07 crc kubenswrapper[4863]: E1205 08:54:07.135885 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="registry-server" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.135907 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="registry-server" Dec 05 08:54:07 crc kubenswrapper[4863]: E1205 08:54:07.135941 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="extract-utilities" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.135950 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="extract-utilities" Dec 05 08:54:07 crc kubenswrapper[4863]: E1205 08:54:07.135971 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="extract-content" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.135978 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="extract-content" Dec 05 08:54:07 crc kubenswrapper[4863]: E1205 08:54:07.136001 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" containerName="nova-cell1-openstack-openstack-cell1" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.136011 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" containerName="nova-cell1-openstack-openstack-cell1" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.136227 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d172e00-2947-4293-96a5-8246d6cd39c7" containerName="registry-server" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.136268 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7ea9e9b-e21d-4a97-a1ff-e5be88487c36" containerName="nova-cell1-openstack-openstack-cell1" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.137730 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.139691 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.139920 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.140173 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.140394 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.140644 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.184541 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-4sf42"] Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281104 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ssh-key\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281229 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281289 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281435 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-inventory\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281570 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsbjx\" (UniqueName: \"kubernetes.io/projected/084f48bc-7cc9-4c09-862f-e0218ee9087a-kube-api-access-wsbjx\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281675 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" 
(UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281796 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceph\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.281876 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.384503 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceph\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.384576 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.384806 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ssh-key\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.384848 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.384875 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.384948 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-inventory\") pod 
\"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.384975 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsbjx\" (UniqueName: \"kubernetes.io/projected/084f48bc-7cc9-4c09-862f-e0218ee9087a-kube-api-access-wsbjx\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.385009 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.388312 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ssh-key\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.388387 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.388462 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.388728 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-inventory\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.389118 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.389179 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " 
pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.389622 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceph\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.406551 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsbjx\" (UniqueName: \"kubernetes.io/projected/084f48bc-7cc9-4c09-862f-e0218ee9087a-kube-api-access-wsbjx\") pod \"telemetry-openstack-openstack-cell1-4sf42\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.455535 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:54:07 crc kubenswrapper[4863]: I1205 08:54:07.975335 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-4sf42"] Dec 05 08:54:08 crc kubenswrapper[4863]: I1205 08:54:08.006772 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" event={"ID":"084f48bc-7cc9-4c09-862f-e0218ee9087a","Type":"ContainerStarted","Data":"88394696cb7be56eb8e415d13d5d0c693610f2b6b63933055384ea9b9f4a2fe9"} Dec 05 08:54:09 crc kubenswrapper[4863]: I1205 08:54:09.020260 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" event={"ID":"084f48bc-7cc9-4c09-862f-e0218ee9087a","Type":"ContainerStarted","Data":"ccdfa53c64e02658fdef9925a79dca8252ef9b07b135ddbe566d0769374edc63"} Dec 05 08:54:09 crc kubenswrapper[4863]: I1205 08:54:09.045863 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" podStartSLOduration=1.6368647410000001 podStartE2EDuration="2.045845981s" podCreationTimestamp="2025-12-05 08:54:07 +0000 UTC" firstStartedPulling="2025-12-05 08:54:07.986743228 +0000 UTC m=+7675.712740268" lastFinishedPulling="2025-12-05 08:54:08.395724458 +0000 UTC m=+7676.121721508" observedRunningTime="2025-12-05 08:54:09.038027791 +0000 UTC m=+7676.764024831" watchObservedRunningTime="2025-12-05 08:54:09.045845981 +0000 UTC m=+7676.771843021" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.264650 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zlgtf"] Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.269930 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.279494 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zlgtf"] Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.456191 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-utilities\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.456249 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hszk\" (UniqueName: \"kubernetes.io/projected/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-kube-api-access-9hszk\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.456329 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-catalog-content\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.558131 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-catalog-content\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.558419 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-utilities\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.558516 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hszk\" (UniqueName: \"kubernetes.io/projected/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-kube-api-access-9hszk\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.558694 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-catalog-content\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.558880 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-utilities\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.581694 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9hszk\" (UniqueName: \"kubernetes.io/projected/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-kube-api-access-9hszk\") pod \"certified-operators-zlgtf\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:51 crc kubenswrapper[4863]: I1205 08:54:51.602371 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:54:52 crc kubenswrapper[4863]: I1205 08:54:52.140497 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zlgtf"] Dec 05 08:54:52 crc kubenswrapper[4863]: I1205 08:54:52.471811 4863 generic.go:334] "Generic (PLEG): container finished" podID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerID="350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f" exitCode=0 Dec 05 08:54:52 crc kubenswrapper[4863]: I1205 08:54:52.471869 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zlgtf" event={"ID":"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43","Type":"ContainerDied","Data":"350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f"} Dec 05 08:54:52 crc kubenswrapper[4863]: I1205 08:54:52.471932 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zlgtf" event={"ID":"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43","Type":"ContainerStarted","Data":"80edd598b13f4997257fe6bd27c20233af758dbabd72f5bebec55ccc55c721ca"} Dec 05 08:54:53 crc kubenswrapper[4863]: I1205 08:54:53.481388 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zlgtf" event={"ID":"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43","Type":"ContainerStarted","Data":"c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7"} Dec 05 08:54:54 crc kubenswrapper[4863]: I1205 08:54:54.495455 4863 generic.go:334] "Generic (PLEG): container finished" podID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerID="c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7" exitCode=0 Dec 05 08:54:54 crc kubenswrapper[4863]: I1205 08:54:54.495544 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zlgtf" event={"ID":"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43","Type":"ContainerDied","Data":"c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7"} Dec 05 08:54:55 crc kubenswrapper[4863]: I1205 08:54:55.508059 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zlgtf" event={"ID":"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43","Type":"ContainerStarted","Data":"c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5"} Dec 05 08:54:55 crc kubenswrapper[4863]: I1205 08:54:55.541908 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zlgtf" podStartSLOduration=2.085155803 podStartE2EDuration="4.541880587s" podCreationTimestamp="2025-12-05 08:54:51 +0000 UTC" firstStartedPulling="2025-12-05 08:54:52.474434999 +0000 UTC m=+7720.200432039" lastFinishedPulling="2025-12-05 08:54:54.931159783 +0000 UTC m=+7722.657156823" observedRunningTime="2025-12-05 08:54:55.531047574 +0000 UTC m=+7723.257044624" watchObservedRunningTime="2025-12-05 08:54:55.541880587 +0000 UTC m=+7723.267877627" Dec 05 08:55:01 crc kubenswrapper[4863]: I1205 08:55:01.602902 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:55:01 crc kubenswrapper[4863]: I1205 08:55:01.603494 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:55:01 crc kubenswrapper[4863]: I1205 08:55:01.654871 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:55:02 crc kubenswrapper[4863]: I1205 08:55:02.617699 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:55:02 crc kubenswrapper[4863]: I1205 08:55:02.671241 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zlgtf"] Dec 05 08:55:04 crc kubenswrapper[4863]: I1205 08:55:04.592097 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zlgtf" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="registry-server" containerID="cri-o://c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5" gracePeriod=2 Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.605706 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.606678 4863 generic.go:334] "Generic (PLEG): container finished" podID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerID="c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5" exitCode=0 Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.606707 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zlgtf" event={"ID":"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43","Type":"ContainerDied","Data":"c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5"} Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.607218 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zlgtf" event={"ID":"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43","Type":"ContainerDied","Data":"80edd598b13f4997257fe6bd27c20233af758dbabd72f5bebec55ccc55c721ca"} Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.607240 4863 scope.go:117] "RemoveContainer" containerID="c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.635953 4863 scope.go:117] "RemoveContainer" containerID="c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.663164 4863 scope.go:117] "RemoveContainer" containerID="350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.709568 4863 scope.go:117] "RemoveContainer" containerID="c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5" Dec 05 08:55:05 crc kubenswrapper[4863]: E1205 08:55:05.710189 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5\": container with ID starting with c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5 not found: ID does not exist" containerID="c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.710220 4863 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5"} err="failed to get container status \"c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5\": rpc error: code = NotFound desc = could not find container \"c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5\": container with ID starting with c7d7e845217fce1a9999f9f84088548b1a9159f9f3aeeb6ea89fb31d69fdf7c5 not found: ID does not exist" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.710241 4863 scope.go:117] "RemoveContainer" containerID="c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7" Dec 05 08:55:05 crc kubenswrapper[4863]: E1205 08:55:05.710582 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7\": container with ID starting with c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7 not found: ID does not exist" containerID="c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.710641 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7"} err="failed to get container status \"c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7\": rpc error: code = NotFound desc = could not find container \"c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7\": container with ID starting with c4825735acb0b54caf76f2e70c89da4c06d213d9c8999e02976a79ea957377a7 not found: ID does not exist" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.710678 4863 scope.go:117] "RemoveContainer" containerID="350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f" Dec 05 08:55:05 crc kubenswrapper[4863]: E1205 08:55:05.711526 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f\": container with ID starting with 350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f not found: ID does not exist" containerID="350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.711563 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f"} err="failed to get container status \"350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f\": rpc error: code = NotFound desc = could not find container \"350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f\": container with ID starting with 350abdcbf8dd6ba9fc047fc9f939e5c6dee4bb9ba750a35b463e19210ab0117f not found: ID does not exist" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.750545 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-utilities\") pod \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.750670 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-catalog-content\") pod \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.750803 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hszk\" (UniqueName: \"kubernetes.io/projected/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-kube-api-access-9hszk\") pod \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\" (UID: \"5dc8d4b4-9f93-4797-8075-e1a73fcf2f43\") " Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.751685 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-utilities" (OuterVolumeSpecName: "utilities") pod "5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" (UID: "5dc8d4b4-9f93-4797-8075-e1a73fcf2f43"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.752368 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.757518 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-kube-api-access-9hszk" (OuterVolumeSpecName: "kube-api-access-9hszk") pod "5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" (UID: "5dc8d4b4-9f93-4797-8075-e1a73fcf2f43"). InnerVolumeSpecName "kube-api-access-9hszk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.802256 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" (UID: "5dc8d4b4-9f93-4797-8075-e1a73fcf2f43"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.854086 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:55:05 crc kubenswrapper[4863]: I1205 08:55:05.854123 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hszk\" (UniqueName: \"kubernetes.io/projected/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43-kube-api-access-9hszk\") on node \"crc\" DevicePath \"\"" Dec 05 08:55:06 crc kubenswrapper[4863]: I1205 08:55:06.621027 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zlgtf" Dec 05 08:55:06 crc kubenswrapper[4863]: I1205 08:55:06.668846 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zlgtf"] Dec 05 08:55:06 crc kubenswrapper[4863]: I1205 08:55:06.683528 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zlgtf"] Dec 05 08:55:08 crc kubenswrapper[4863]: I1205 08:55:08.463887 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:55:08 crc kubenswrapper[4863]: I1205 08:55:08.464246 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:55:08 crc kubenswrapper[4863]: I1205 08:55:08.613156 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" path="/var/lib/kubelet/pods/5dc8d4b4-9f93-4797-8075-e1a73fcf2f43/volumes" Dec 05 08:55:38 crc kubenswrapper[4863]: I1205 08:55:38.464753 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:55:38 crc kubenswrapper[4863]: I1205 08:55:38.465525 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.364605 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dw4kh"] Dec 05 08:55:39 crc kubenswrapper[4863]: E1205 08:55:39.365490 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="registry-server" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.365513 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="registry-server" Dec 05 08:55:39 crc kubenswrapper[4863]: E1205 08:55:39.365541 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="extract-content" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.365550 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="extract-content" Dec 05 08:55:39 crc kubenswrapper[4863]: E1205 08:55:39.365571 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="extract-utilities" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.365580 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="extract-utilities" Dec 05 08:55:39 crc 
kubenswrapper[4863]: I1205 08:55:39.365822 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dc8d4b4-9f93-4797-8075-e1a73fcf2f43" containerName="registry-server" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.367805 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.376878 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dw4kh"] Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.461995 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-utilities\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.462142 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq9tq\" (UniqueName: \"kubernetes.io/projected/62f095ab-02c8-40cf-a79b-21d6c4329961-kube-api-access-wq9tq\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.462206 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-catalog-content\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.564730 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-utilities\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.564834 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq9tq\" (UniqueName: \"kubernetes.io/projected/62f095ab-02c8-40cf-a79b-21d6c4329961-kube-api-access-wq9tq\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.564896 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-catalog-content\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.565329 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-utilities\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.565348 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-catalog-content\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.592318 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq9tq\" (UniqueName: \"kubernetes.io/projected/62f095ab-02c8-40cf-a79b-21d6c4329961-kube-api-access-wq9tq\") pod \"community-operators-dw4kh\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:39 crc kubenswrapper[4863]: I1205 08:55:39.694579 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:40 crc kubenswrapper[4863]: I1205 08:55:40.254633 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dw4kh"] Dec 05 08:55:40 crc kubenswrapper[4863]: I1205 08:55:40.948009 4863 generic.go:334] "Generic (PLEG): container finished" podID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerID="b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289" exitCode=0 Dec 05 08:55:40 crc kubenswrapper[4863]: I1205 08:55:40.948574 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw4kh" event={"ID":"62f095ab-02c8-40cf-a79b-21d6c4329961","Type":"ContainerDied","Data":"b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289"} Dec 05 08:55:40 crc kubenswrapper[4863]: I1205 08:55:40.948602 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw4kh" event={"ID":"62f095ab-02c8-40cf-a79b-21d6c4329961","Type":"ContainerStarted","Data":"7a2e7d220ebe61ef6c0a5051a6c858e23f6c34658a9f80a9c1848988ac5295bc"} Dec 05 08:55:40 crc kubenswrapper[4863]: I1205 08:55:40.952577 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:55:41 crc kubenswrapper[4863]: I1205 08:55:41.959341 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw4kh" event={"ID":"62f095ab-02c8-40cf-a79b-21d6c4329961","Type":"ContainerStarted","Data":"5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee"} Dec 05 08:55:42 crc kubenswrapper[4863]: I1205 08:55:42.968873 4863 generic.go:334] "Generic (PLEG): container finished" podID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerID="5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee" exitCode=0 Dec 05 08:55:42 crc kubenswrapper[4863]: I1205 08:55:42.968923 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw4kh" event={"ID":"62f095ab-02c8-40cf-a79b-21d6c4329961","Type":"ContainerDied","Data":"5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee"} Dec 05 08:55:43 crc kubenswrapper[4863]: I1205 08:55:43.980956 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw4kh" event={"ID":"62f095ab-02c8-40cf-a79b-21d6c4329961","Type":"ContainerStarted","Data":"050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23"} Dec 05 08:55:44 crc kubenswrapper[4863]: I1205 08:55:44.000155 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dw4kh" podStartSLOduration=2.584972288 podStartE2EDuration="5.000140638s" 
podCreationTimestamp="2025-12-05 08:55:39 +0000 UTC" firstStartedPulling="2025-12-05 08:55:40.950888066 +0000 UTC m=+7768.676885106" lastFinishedPulling="2025-12-05 08:55:43.366056416 +0000 UTC m=+7771.092053456" observedRunningTime="2025-12-05 08:55:43.995702461 +0000 UTC m=+7771.721699501" watchObservedRunningTime="2025-12-05 08:55:44.000140638 +0000 UTC m=+7771.726137678" Dec 05 08:55:49 crc kubenswrapper[4863]: I1205 08:55:49.694985 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:49 crc kubenswrapper[4863]: I1205 08:55:49.698627 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:49 crc kubenswrapper[4863]: I1205 08:55:49.742880 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:50 crc kubenswrapper[4863]: I1205 08:55:50.095536 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:50 crc kubenswrapper[4863]: I1205 08:55:50.154649 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dw4kh"] Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.054197 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dw4kh" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="registry-server" containerID="cri-o://050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23" gracePeriod=2 Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.567791 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.636097 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq9tq\" (UniqueName: \"kubernetes.io/projected/62f095ab-02c8-40cf-a79b-21d6c4329961-kube-api-access-wq9tq\") pod \"62f095ab-02c8-40cf-a79b-21d6c4329961\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.636252 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-catalog-content\") pod \"62f095ab-02c8-40cf-a79b-21d6c4329961\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.636278 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-utilities\") pod \"62f095ab-02c8-40cf-a79b-21d6c4329961\" (UID: \"62f095ab-02c8-40cf-a79b-21d6c4329961\") " Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.638907 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-utilities" (OuterVolumeSpecName: "utilities") pod "62f095ab-02c8-40cf-a79b-21d6c4329961" (UID: "62f095ab-02c8-40cf-a79b-21d6c4329961"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.648025 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f095ab-02c8-40cf-a79b-21d6c4329961-kube-api-access-wq9tq" (OuterVolumeSpecName: "kube-api-access-wq9tq") pod "62f095ab-02c8-40cf-a79b-21d6c4329961" (UID: "62f095ab-02c8-40cf-a79b-21d6c4329961"). InnerVolumeSpecName "kube-api-access-wq9tq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.690767 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62f095ab-02c8-40cf-a79b-21d6c4329961" (UID: "62f095ab-02c8-40cf-a79b-21d6c4329961"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.738507 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq9tq\" (UniqueName: \"kubernetes.io/projected/62f095ab-02c8-40cf-a79b-21d6c4329961-kube-api-access-wq9tq\") on node \"crc\" DevicePath \"\"" Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.738541 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:55:52 crc kubenswrapper[4863]: I1205 08:55:52.738551 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f095ab-02c8-40cf-a79b-21d6c4329961-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.066719 4863 generic.go:334] "Generic (PLEG): container finished" podID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerID="050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23" exitCode=0 Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.066775 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw4kh" event={"ID":"62f095ab-02c8-40cf-a79b-21d6c4329961","Type":"ContainerDied","Data":"050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23"} Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.066790 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dw4kh" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.066812 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw4kh" event={"ID":"62f095ab-02c8-40cf-a79b-21d6c4329961","Type":"ContainerDied","Data":"7a2e7d220ebe61ef6c0a5051a6c858e23f6c34658a9f80a9c1848988ac5295bc"} Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.066835 4863 scope.go:117] "RemoveContainer" containerID="050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.086652 4863 scope.go:117] "RemoveContainer" containerID="5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.110709 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dw4kh"] Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.124979 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dw4kh"] Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.127681 4863 scope.go:117] "RemoveContainer" containerID="b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.183820 4863 scope.go:117] "RemoveContainer" containerID="050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23" Dec 05 08:55:53 crc kubenswrapper[4863]: E1205 08:55:53.187008 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23\": container with ID starting with 050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23 not found: ID does not exist" containerID="050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.187036 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23"} err="failed to get container status \"050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23\": rpc error: code = NotFound desc = could not find container \"050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23\": container with ID starting with 050c12124f9fd5d0ccd65a66f267160e96ba9a136ed60a192ecad17f42c97f23 not found: ID does not exist" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.187058 4863 scope.go:117] "RemoveContainer" containerID="5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee" Dec 05 08:55:53 crc kubenswrapper[4863]: E1205 08:55:53.187453 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee\": container with ID starting with 5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee not found: ID does not exist" containerID="5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.187482 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee"} err="failed to get container status \"5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee\": rpc error: code = NotFound desc = could not find 
container \"5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee\": container with ID starting with 5aa4478c390917b67fe0e121364f63ad3ef9f0f0ce3441f836a6834cc42762ee not found: ID does not exist" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.187494 4863 scope.go:117] "RemoveContainer" containerID="b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289" Dec 05 08:55:53 crc kubenswrapper[4863]: E1205 08:55:53.187799 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289\": container with ID starting with b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289 not found: ID does not exist" containerID="b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289" Dec 05 08:55:53 crc kubenswrapper[4863]: I1205 08:55:53.187816 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289"} err="failed to get container status \"b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289\": rpc error: code = NotFound desc = could not find container \"b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289\": container with ID starting with b916747784ad8d0ad5409c74a72ba06c6e3adfc4d8f068917d8b324d4c804289 not found: ID does not exist" Dec 05 08:55:54 crc kubenswrapper[4863]: I1205 08:55:54.616194 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" path="/var/lib/kubelet/pods/62f095ab-02c8-40cf-a79b-21d6c4329961/volumes" Dec 05 08:56:08 crc kubenswrapper[4863]: I1205 08:56:08.464064 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:56:08 crc kubenswrapper[4863]: I1205 08:56:08.464807 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:56:08 crc kubenswrapper[4863]: I1205 08:56:08.464885 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 08:56:08 crc kubenswrapper[4863]: I1205 08:56:08.466046 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:56:08 crc kubenswrapper[4863]: I1205 08:56:08.466152 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" gracePeriod=600 Dec 05 08:56:08 crc kubenswrapper[4863]: E1205 08:56:08.654336 4863 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:56:09 crc kubenswrapper[4863]: I1205 08:56:09.241453 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" exitCode=0 Dec 05 08:56:09 crc kubenswrapper[4863]: I1205 08:56:09.241501 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb"} Dec 05 08:56:09 crc kubenswrapper[4863]: I1205 08:56:09.241588 4863 scope.go:117] "RemoveContainer" containerID="9b035f8ed4540bd577808ba1e60d5273623fa73824237e3b4c3f38e08278faf4" Dec 05 08:56:09 crc kubenswrapper[4863]: I1205 08:56:09.242238 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:56:09 crc kubenswrapper[4863]: E1205 08:56:09.242705 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:56:19 crc kubenswrapper[4863]: I1205 08:56:19.601410 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:56:19 crc kubenswrapper[4863]: E1205 08:56:19.602769 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:56:34 crc kubenswrapper[4863]: I1205 08:56:34.602525 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:56:34 crc kubenswrapper[4863]: E1205 08:56:34.603367 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:56:45 crc kubenswrapper[4863]: I1205 08:56:45.601530 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:56:45 crc kubenswrapper[4863]: E1205 08:56:45.602558 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:56:56 crc kubenswrapper[4863]: I1205 08:56:56.602016 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:56:56 crc kubenswrapper[4863]: E1205 08:56:56.603973 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:57:08 crc kubenswrapper[4863]: I1205 08:57:08.602280 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:57:08 crc kubenswrapper[4863]: E1205 08:57:08.603240 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:57:22 crc kubenswrapper[4863]: I1205 08:57:22.608203 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:57:22 crc kubenswrapper[4863]: E1205 08:57:22.608934 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:57:37 crc kubenswrapper[4863]: I1205 08:57:37.604465 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:57:37 crc kubenswrapper[4863]: E1205 08:57:37.605443 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:57:49 crc kubenswrapper[4863]: I1205 08:57:49.602977 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:57:49 crc kubenswrapper[4863]: E1205 08:57:49.603806 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:58:04 crc kubenswrapper[4863]: I1205 08:58:04.603035 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:58:04 crc kubenswrapper[4863]: E1205 08:58:04.604053 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:58:19 crc kubenswrapper[4863]: I1205 08:58:19.602403 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:58:19 crc kubenswrapper[4863]: E1205 08:58:19.603550 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:58:21 crc kubenswrapper[4863]: I1205 08:58:21.565135 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" event={"ID":"084f48bc-7cc9-4c09-862f-e0218ee9087a","Type":"ContainerDied","Data":"ccdfa53c64e02658fdef9925a79dca8252ef9b07b135ddbe566d0769374edc63"} Dec 05 08:58:21 crc kubenswrapper[4863]: I1205 08:58:21.565033 4863 generic.go:334] "Generic (PLEG): container finished" podID="084f48bc-7cc9-4c09-862f-e0218ee9087a" containerID="ccdfa53c64e02658fdef9925a79dca8252ef9b07b135ddbe566d0769374edc63" exitCode=0 Dec 05 08:58:22 crc kubenswrapper[4863]: I1205 08:58:22.995582 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.036612 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-1\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.036712 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-inventory\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.036762 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.036847 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-0\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.036915 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsbjx\" (UniqueName: \"kubernetes.io/projected/084f48bc-7cc9-4c09-862f-e0218ee9087a-kube-api-access-wsbjx\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.036968 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-telemetry-combined-ca-bundle\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.037028 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ssh-key\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.037542 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceph\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.042999 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceph" (OuterVolumeSpecName: "ceph") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.044008 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.044972 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/084f48bc-7cc9-4c09-862f-e0218ee9087a-kube-api-access-wsbjx" (OuterVolumeSpecName: "kube-api-access-wsbjx") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "kube-api-access-wsbjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.067598 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.073335 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: E1205 08:58:23.080896 4863 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2 podName:084f48bc-7cc9-4c09-862f-e0218ee9087a nodeName:}" failed. No retries permitted until 2025-12-05 08:58:23.580865256 +0000 UTC m=+7931.306862306 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ceilometer-compute-config-data-2" (UniqueName: "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a") : error deleting /var/lib/kubelet/pods/084f48bc-7cc9-4c09-862f-e0218ee9087a/volume-subpaths: remove /var/lib/kubelet/pods/084f48bc-7cc9-4c09-862f-e0218ee9087a/volume-subpaths: no such file or directory Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.081542 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-inventory" (OuterVolumeSpecName: "inventory") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.082789 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.140690 4863 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.140730 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.140743 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.140754 4863 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.140771 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.140783 4863 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.140797 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsbjx\" (UniqueName: \"kubernetes.io/projected/084f48bc-7cc9-4c09-862f-e0218ee9087a-kube-api-access-wsbjx\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.586053 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" event={"ID":"084f48bc-7cc9-4c09-862f-e0218ee9087a","Type":"ContainerDied","Data":"88394696cb7be56eb8e415d13d5d0c693610f2b6b63933055384ea9b9f4a2fe9"} Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.586098 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88394696cb7be56eb8e415d13d5d0c693610f2b6b63933055384ea9b9f4a2fe9" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.586112 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-4sf42" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.648394 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2\") pod \"084f48bc-7cc9-4c09-862f-e0218ee9087a\" (UID: \"084f48bc-7cc9-4c09-862f-e0218ee9087a\") " Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.656292 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "084f48bc-7cc9-4c09-862f-e0218ee9087a" (UID: "084f48bc-7cc9-4c09-862f-e0218ee9087a"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.677313 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-jn22z"] Dec 05 08:58:23 crc kubenswrapper[4863]: E1205 08:58:23.677723 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="registry-server" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.677738 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="registry-server" Dec 05 08:58:23 crc kubenswrapper[4863]: E1205 08:58:23.677754 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="extract-content" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.677761 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="extract-content" Dec 05 08:58:23 crc kubenswrapper[4863]: E1205 08:58:23.677774 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="extract-utilities" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.677780 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="extract-utilities" Dec 05 08:58:23 crc kubenswrapper[4863]: E1205 08:58:23.677792 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="084f48bc-7cc9-4c09-862f-e0218ee9087a" containerName="telemetry-openstack-openstack-cell1" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.677798 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="084f48bc-7cc9-4c09-862f-e0218ee9087a" containerName="telemetry-openstack-openstack-cell1" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.678053 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f095ab-02c8-40cf-a79b-21d6c4329961" containerName="registry-server" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.678078 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="084f48bc-7cc9-4c09-862f-e0218ee9087a" containerName="telemetry-openstack-openstack-cell1" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.678971 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.681739 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.688741 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-jn22z"] Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.750154 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.750207 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.750234 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.750427 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.750543 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg485\" (UniqueName: \"kubernetes.io/projected/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-kube-api-access-zg485\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.750580 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.750648 4863 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/084f48bc-7cc9-4c09-862f-e0218ee9087a-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.853035 4863 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.853358 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.853395 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.853443 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.853555 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg485\" (UniqueName: \"kubernetes.io/projected/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-kube-api-access-zg485\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.853603 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.857422 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.857953 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.858076 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-inventory\") pod 
\"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.858165 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.858540 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:23 crc kubenswrapper[4863]: I1205 08:58:23.869237 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg485\" (UniqueName: \"kubernetes.io/projected/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-kube-api-access-zg485\") pod \"neutron-sriov-openstack-openstack-cell1-jn22z\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:24 crc kubenswrapper[4863]: I1205 08:58:24.018706 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 08:58:24 crc kubenswrapper[4863]: I1205 08:58:24.582420 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-jn22z"] Dec 05 08:58:25 crc kubenswrapper[4863]: I1205 08:58:25.619213 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" event={"ID":"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567","Type":"ContainerStarted","Data":"2187789446a7340f0a600523f0fa681c13522d5c2c10ff743ac3c386a7e75cb2"} Dec 05 08:58:25 crc kubenswrapper[4863]: I1205 08:58:25.619552 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" event={"ID":"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567","Type":"ContainerStarted","Data":"3f6ed5833877f7c2c706c90c231f1501b1f49dee435b916d6854f6964bc0e814"} Dec 05 08:58:25 crc kubenswrapper[4863]: I1205 08:58:25.648871 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" podStartSLOduration=2.225565298 podStartE2EDuration="2.648853707s" podCreationTimestamp="2025-12-05 08:58:23 +0000 UTC" firstStartedPulling="2025-12-05 08:58:24.606163013 +0000 UTC m=+7932.332160053" lastFinishedPulling="2025-12-05 08:58:25.029451422 +0000 UTC m=+7932.755448462" observedRunningTime="2025-12-05 08:58:25.636617389 +0000 UTC m=+7933.362614449" watchObservedRunningTime="2025-12-05 08:58:25.648853707 +0000 UTC m=+7933.374850747" Dec 05 08:58:31 crc kubenswrapper[4863]: I1205 08:58:31.602058 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:58:31 crc kubenswrapper[4863]: E1205 08:58:31.602827 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:58:44 crc kubenswrapper[4863]: I1205 08:58:44.601887 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:58:44 crc kubenswrapper[4863]: E1205 08:58:44.602846 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:58:56 crc kubenswrapper[4863]: I1205 08:58:56.605382 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:58:56 crc kubenswrapper[4863]: E1205 08:58:56.606338 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:59:09 crc kubenswrapper[4863]: I1205 08:59:09.602465 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:59:09 crc kubenswrapper[4863]: E1205 08:59:09.603356 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:59:22 crc kubenswrapper[4863]: I1205 08:59:22.613805 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:59:22 crc kubenswrapper[4863]: E1205 08:59:22.615365 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:59:35 crc kubenswrapper[4863]: I1205 08:59:35.602804 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:59:35 crc kubenswrapper[4863]: E1205 08:59:35.603730 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 08:59:46 crc kubenswrapper[4863]: I1205 08:59:46.602170 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 08:59:46 crc kubenswrapper[4863]: E1205 08:59:46.605005 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.172818 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2"] Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.176679 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.179121 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.179417 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.198329 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2"] Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.359405 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0a1802d-bade-4e27-bb06-543d9bc1c9df-secret-volume\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.359609 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0a1802d-bade-4e27-bb06-543d9bc1c9df-config-volume\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.359715 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjd7r\" (UniqueName: \"kubernetes.io/projected/b0a1802d-bade-4e27-bb06-543d9bc1c9df-kube-api-access-pjd7r\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.462250 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0a1802d-bade-4e27-bb06-543d9bc1c9df-secret-volume\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc 
kubenswrapper[4863]: I1205 09:00:00.462699 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0a1802d-bade-4e27-bb06-543d9bc1c9df-config-volume\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.462911 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjd7r\" (UniqueName: \"kubernetes.io/projected/b0a1802d-bade-4e27-bb06-543d9bc1c9df-kube-api-access-pjd7r\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.463953 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0a1802d-bade-4e27-bb06-543d9bc1c9df-config-volume\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.469235 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0a1802d-bade-4e27-bb06-543d9bc1c9df-secret-volume\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.481066 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjd7r\" (UniqueName: \"kubernetes.io/projected/b0a1802d-bade-4e27-bb06-543d9bc1c9df-kube-api-access-pjd7r\") pod \"collect-profiles-29415420-wwzf2\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.504049 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.607199 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 09:00:00 crc kubenswrapper[4863]: E1205 09:00:00.607553 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:00:00 crc kubenswrapper[4863]: I1205 09:00:00.961812 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2"] Dec 05 09:00:01 crc kubenswrapper[4863]: I1205 09:00:01.514634 4863 generic.go:334] "Generic (PLEG): container finished" podID="b0a1802d-bade-4e27-bb06-543d9bc1c9df" containerID="483e393a357acefb86903c17486348d69952c106b6001eb2df8a9201b78e1d7d" exitCode=0 Dec 05 09:00:01 crc kubenswrapper[4863]: I1205 09:00:01.514757 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" event={"ID":"b0a1802d-bade-4e27-bb06-543d9bc1c9df","Type":"ContainerDied","Data":"483e393a357acefb86903c17486348d69952c106b6001eb2df8a9201b78e1d7d"} Dec 05 09:00:01 crc kubenswrapper[4863]: I1205 09:00:01.515018 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" event={"ID":"b0a1802d-bade-4e27-bb06-543d9bc1c9df","Type":"ContainerStarted","Data":"b13ae7d23890b884af226c07848493136601b0888a62ba081b54841e2007307b"} Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.898784 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.930197 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0a1802d-bade-4e27-bb06-543d9bc1c9df-config-volume\") pod \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.930324 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0a1802d-bade-4e27-bb06-543d9bc1c9df-secret-volume\") pod \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.930389 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjd7r\" (UniqueName: \"kubernetes.io/projected/b0a1802d-bade-4e27-bb06-543d9bc1c9df-kube-api-access-pjd7r\") pod \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\" (UID: \"b0a1802d-bade-4e27-bb06-543d9bc1c9df\") " Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.931971 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0a1802d-bade-4e27-bb06-543d9bc1c9df-config-volume" (OuterVolumeSpecName: "config-volume") pod "b0a1802d-bade-4e27-bb06-543d9bc1c9df" (UID: "b0a1802d-bade-4e27-bb06-543d9bc1c9df"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.932375 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0a1802d-bade-4e27-bb06-543d9bc1c9df-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.936442 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0a1802d-bade-4e27-bb06-543d9bc1c9df-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b0a1802d-bade-4e27-bb06-543d9bc1c9df" (UID: "b0a1802d-bade-4e27-bb06-543d9bc1c9df"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:00:02 crc kubenswrapper[4863]: I1205 09:00:02.936821 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0a1802d-bade-4e27-bb06-543d9bc1c9df-kube-api-access-pjd7r" (OuterVolumeSpecName: "kube-api-access-pjd7r") pod "b0a1802d-bade-4e27-bb06-543d9bc1c9df" (UID: "b0a1802d-bade-4e27-bb06-543d9bc1c9df"). InnerVolumeSpecName "kube-api-access-pjd7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:00:03 crc kubenswrapper[4863]: I1205 09:00:03.034199 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0a1802d-bade-4e27-bb06-543d9bc1c9df-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:03 crc kubenswrapper[4863]: I1205 09:00:03.034234 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjd7r\" (UniqueName: \"kubernetes.io/projected/b0a1802d-bade-4e27-bb06-543d9bc1c9df-kube-api-access-pjd7r\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:03 crc kubenswrapper[4863]: I1205 09:00:03.536276 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" event={"ID":"b0a1802d-bade-4e27-bb06-543d9bc1c9df","Type":"ContainerDied","Data":"b13ae7d23890b884af226c07848493136601b0888a62ba081b54841e2007307b"} Dec 05 09:00:03 crc kubenswrapper[4863]: I1205 09:00:03.536326 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b13ae7d23890b884af226c07848493136601b0888a62ba081b54841e2007307b" Dec 05 09:00:03 crc kubenswrapper[4863]: I1205 09:00:03.536353 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-wwzf2" Dec 05 09:00:03 crc kubenswrapper[4863]: I1205 09:00:03.972020 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m"] Dec 05 09:00:03 crc kubenswrapper[4863]: I1205 09:00:03.980883 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-ght5m"] Dec 05 09:00:04 crc kubenswrapper[4863]: I1205 09:00:04.613909 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe2b1bc9-6eeb-4e15-861a-8f5401c69e69" path="/var/lib/kubelet/pods/fe2b1bc9-6eeb-4e15-861a-8f5401c69e69/volumes" Dec 05 09:00:14 crc kubenswrapper[4863]: I1205 09:00:14.601686 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 09:00:14 crc kubenswrapper[4863]: E1205 09:00:14.602417 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.657975 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bpb"] Dec 05 09:00:17 crc kubenswrapper[4863]: E1205 09:00:17.659069 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0a1802d-bade-4e27-bb06-543d9bc1c9df" containerName="collect-profiles" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.659111 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0a1802d-bade-4e27-bb06-543d9bc1c9df" containerName="collect-profiles" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.659602 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0a1802d-bade-4e27-bb06-543d9bc1c9df" containerName="collect-profiles" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.663255 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.674227 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bpb"] Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.827745 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxzk4\" (UniqueName: \"kubernetes.io/projected/f8065726-4b4e-4d04-b0fb-77d54100cb83-kube-api-access-xxzk4\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.827838 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-utilities\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.827882 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-catalog-content\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.929831 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxzk4\" (UniqueName: \"kubernetes.io/projected/f8065726-4b4e-4d04-b0fb-77d54100cb83-kube-api-access-xxzk4\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.929932 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-utilities\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.929987 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-catalog-content\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.930465 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-utilities\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.930511 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-catalog-content\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.954165 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xxzk4\" (UniqueName: \"kubernetes.io/projected/f8065726-4b4e-4d04-b0fb-77d54100cb83-kube-api-access-xxzk4\") pod \"redhat-marketplace-k8bpb\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:17 crc kubenswrapper[4863]: I1205 09:00:17.989172 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:18 crc kubenswrapper[4863]: I1205 09:00:18.461671 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bpb"] Dec 05 09:00:18 crc kubenswrapper[4863]: I1205 09:00:18.702966 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bpb" event={"ID":"f8065726-4b4e-4d04-b0fb-77d54100cb83","Type":"ContainerStarted","Data":"e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848"} Dec 05 09:00:18 crc kubenswrapper[4863]: I1205 09:00:18.703271 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bpb" event={"ID":"f8065726-4b4e-4d04-b0fb-77d54100cb83","Type":"ContainerStarted","Data":"b5ca063ccaaf310c746488d9db81b4c204a139bddba1f3ceb953d6e871a6cbdc"} Dec 05 09:00:19 crc kubenswrapper[4863]: I1205 09:00:19.715959 4863 generic.go:334] "Generic (PLEG): container finished" podID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerID="e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848" exitCode=0 Dec 05 09:00:19 crc kubenswrapper[4863]: I1205 09:00:19.716278 4863 generic.go:334] "Generic (PLEG): container finished" podID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerID="c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8" exitCode=0 Dec 05 09:00:19 crc kubenswrapper[4863]: I1205 09:00:19.716299 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bpb" event={"ID":"f8065726-4b4e-4d04-b0fb-77d54100cb83","Type":"ContainerDied","Data":"e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848"} Dec 05 09:00:19 crc kubenswrapper[4863]: I1205 09:00:19.716325 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bpb" event={"ID":"f8065726-4b4e-4d04-b0fb-77d54100cb83","Type":"ContainerDied","Data":"c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8"} Dec 05 09:00:20 crc kubenswrapper[4863]: I1205 09:00:20.728638 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bpb" event={"ID":"f8065726-4b4e-4d04-b0fb-77d54100cb83","Type":"ContainerStarted","Data":"57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe"} Dec 05 09:00:20 crc kubenswrapper[4863]: I1205 09:00:20.752557 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k8bpb" podStartSLOduration=2.321198137 podStartE2EDuration="3.752537525s" podCreationTimestamp="2025-12-05 09:00:17 +0000 UTC" firstStartedPulling="2025-12-05 09:00:18.705052671 +0000 UTC m=+8046.431049711" lastFinishedPulling="2025-12-05 09:00:20.136392059 +0000 UTC m=+8047.862389099" observedRunningTime="2025-12-05 09:00:20.746772545 +0000 UTC m=+8048.472769615" watchObservedRunningTime="2025-12-05 09:00:20.752537525 +0000 UTC m=+8048.478534555" Dec 05 09:00:27 crc kubenswrapper[4863]: I1205 09:00:27.990127 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:27 crc kubenswrapper[4863]: I1205 09:00:27.990728 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:28 crc kubenswrapper[4863]: I1205 09:00:28.035773 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:28 crc kubenswrapper[4863]: I1205 09:00:28.602701 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 09:00:28 crc kubenswrapper[4863]: E1205 09:00:28.603604 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:00:28 crc kubenswrapper[4863]: I1205 09:00:28.871753 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:28 crc kubenswrapper[4863]: I1205 09:00:28.928393 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bpb"] Dec 05 09:00:30 crc kubenswrapper[4863]: I1205 09:00:30.831239 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k8bpb" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="registry-server" containerID="cri-o://57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe" gracePeriod=2 Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.346496 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.492599 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-catalog-content\") pod \"f8065726-4b4e-4d04-b0fb-77d54100cb83\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.492739 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-utilities\") pod \"f8065726-4b4e-4d04-b0fb-77d54100cb83\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.492898 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxzk4\" (UniqueName: \"kubernetes.io/projected/f8065726-4b4e-4d04-b0fb-77d54100cb83-kube-api-access-xxzk4\") pod \"f8065726-4b4e-4d04-b0fb-77d54100cb83\" (UID: \"f8065726-4b4e-4d04-b0fb-77d54100cb83\") " Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.494030 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-utilities" (OuterVolumeSpecName: "utilities") pod "f8065726-4b4e-4d04-b0fb-77d54100cb83" (UID: "f8065726-4b4e-4d04-b0fb-77d54100cb83"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.512140 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8065726-4b4e-4d04-b0fb-77d54100cb83" (UID: "f8065726-4b4e-4d04-b0fb-77d54100cb83"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.595131 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.595177 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8065726-4b4e-4d04-b0fb-77d54100cb83-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.951098 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8065726-4b4e-4d04-b0fb-77d54100cb83-kube-api-access-xxzk4" (OuterVolumeSpecName: "kube-api-access-xxzk4") pod "f8065726-4b4e-4d04-b0fb-77d54100cb83" (UID: "f8065726-4b4e-4d04-b0fb-77d54100cb83"). InnerVolumeSpecName "kube-api-access-xxzk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.972560 4863 generic.go:334] "Generic (PLEG): container finished" podID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerID="57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe" exitCode=0 Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.972618 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bpb" event={"ID":"f8065726-4b4e-4d04-b0fb-77d54100cb83","Type":"ContainerDied","Data":"57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe"} Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.972649 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bpb" event={"ID":"f8065726-4b4e-4d04-b0fb-77d54100cb83","Type":"ContainerDied","Data":"b5ca063ccaaf310c746488d9db81b4c204a139bddba1f3ceb953d6e871a6cbdc"} Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.972667 4863 scope.go:117] "RemoveContainer" containerID="57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe" Dec 05 09:00:31 crc kubenswrapper[4863]: I1205 09:00:31.972790 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bpb" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.003089 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxzk4\" (UniqueName: \"kubernetes.io/projected/f8065726-4b4e-4d04-b0fb-77d54100cb83-kube-api-access-xxzk4\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.025658 4863 scope.go:117] "RemoveContainer" containerID="c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.037610 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bpb"] Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.054642 4863 scope.go:117] "RemoveContainer" containerID="e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.056312 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bpb"] Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.103830 4863 scope.go:117] "RemoveContainer" containerID="57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe" Dec 05 09:00:32 crc kubenswrapper[4863]: E1205 09:00:32.104338 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe\": container with ID starting with 57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe not found: ID does not exist" containerID="57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.104436 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe"} err="failed to get container status \"57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe\": rpc error: code = NotFound desc = could not find container \"57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe\": container with ID starting with 57d8edb38c87d685532708cc2ba8ab87d9d4511dfc4714d399cb39680de42ffe not found: ID does not exist" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.104748 4863 scope.go:117] "RemoveContainer" containerID="c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8" Dec 05 09:00:32 crc kubenswrapper[4863]: E1205 09:00:32.105135 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8\": container with ID starting with c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8 not found: ID does not exist" containerID="c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.105226 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8"} err="failed to get container status \"c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8\": rpc error: code = NotFound desc = could not find container \"c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8\": container with ID starting with c825bcce71dc5ff6a6dfb548c088277d4c40a4210988789698f6d22b134176e8 not found: ID does not exist" Dec 05 09:00:32 
crc kubenswrapper[4863]: I1205 09:00:32.105328 4863 scope.go:117] "RemoveContainer" containerID="e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848" Dec 05 09:00:32 crc kubenswrapper[4863]: E1205 09:00:32.106163 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848\": container with ID starting with e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848 not found: ID does not exist" containerID="e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.106285 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848"} err="failed to get container status \"e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848\": rpc error: code = NotFound desc = could not find container \"e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848\": container with ID starting with e118ebbb23441b97ac9b5c578761527584d379e2c489769c22859e859805d848 not found: ID does not exist" Dec 05 09:00:32 crc kubenswrapper[4863]: I1205 09:00:32.618228 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" path="/var/lib/kubelet/pods/f8065726-4b4e-4d04-b0fb-77d54100cb83/volumes" Dec 05 09:00:40 crc kubenswrapper[4863]: I1205 09:00:40.427019 4863 scope.go:117] "RemoveContainer" containerID="56ceaf9896b6a926c11e5239fd9e4188392784c271aaee8ee6ffcc6193508eb6" Dec 05 09:00:41 crc kubenswrapper[4863]: I1205 09:00:41.073574 4863 generic.go:334] "Generic (PLEG): container finished" podID="1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" containerID="2187789446a7340f0a600523f0fa681c13522d5c2c10ff743ac3c386a7e75cb2" exitCode=0 Dec 05 09:00:41 crc kubenswrapper[4863]: I1205 09:00:41.073668 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" event={"ID":"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567","Type":"ContainerDied","Data":"2187789446a7340f0a600523f0fa681c13522d5c2c10ff743ac3c386a7e75cb2"} Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.561575 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.711867 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zg485\" (UniqueName: \"kubernetes.io/projected/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-kube-api-access-zg485\") pod \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.711948 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-combined-ca-bundle\") pod \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.712007 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-inventory\") pod \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.712033 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-agent-neutron-config-0\") pod \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.712246 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ceph\") pod \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.712308 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ssh-key\") pod \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\" (UID: \"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567\") " Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.721971 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" (UID: "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.722023 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ceph" (OuterVolumeSpecName: "ceph") pod "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" (UID: "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.722160 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-kube-api-access-zg485" (OuterVolumeSpecName: "kube-api-access-zg485") pod "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" (UID: "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567"). InnerVolumeSpecName "kube-api-access-zg485". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.743316 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" (UID: "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.743371 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-inventory" (OuterVolumeSpecName: "inventory") pod "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" (UID: "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.746232 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" (UID: "1e5562f8-cb23-40ea-a9a3-4de4f8ee6567"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.815032 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.815066 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.815079 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zg485\" (UniqueName: \"kubernetes.io/projected/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-kube-api-access-zg485\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.815088 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.815097 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:42 crc kubenswrapper[4863]: I1205 09:00:42.815108 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e5562f8-cb23-40ea-a9a3-4de4f8ee6567-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.093600 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" event={"ID":"1e5562f8-cb23-40ea-a9a3-4de4f8ee6567","Type":"ContainerDied","Data":"3f6ed5833877f7c2c706c90c231f1501b1f49dee435b916d6854f6964bc0e814"} Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.093651 4863 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="3f6ed5833877f7c2c706c90c231f1501b1f49dee435b916d6854f6964bc0e814" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.093717 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-jn22z" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.205331 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-4t74k"] Dec 05 09:00:43 crc kubenswrapper[4863]: E1205 09:00:43.206004 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="registry-server" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.206027 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="registry-server" Dec 05 09:00:43 crc kubenswrapper[4863]: E1205 09:00:43.206044 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" containerName="neutron-sriov-openstack-openstack-cell1" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.206051 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" containerName="neutron-sriov-openstack-openstack-cell1" Dec 05 09:00:43 crc kubenswrapper[4863]: E1205 09:00:43.206077 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="extract-content" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.206091 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="extract-content" Dec 05 09:00:43 crc kubenswrapper[4863]: E1205 09:00:43.206125 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="extract-utilities" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.206132 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="extract-utilities" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.206305 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e5562f8-cb23-40ea-a9a3-4de4f8ee6567" containerName="neutron-sriov-openstack-openstack-cell1" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.206333 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8065726-4b4e-4d04-b0fb-77d54100cb83" containerName="registry-server" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.207408 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.210106 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.210322 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.210545 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.210725 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.215382 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.228306 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-4t74k"] Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.334186 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd6tw\" (UniqueName: \"kubernetes.io/projected/52510c93-a879-4c02-94b8-9142a9451a26-kube-api-access-dd6tw\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.334288 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.334329 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.334549 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.334890 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.335274 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.437653 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.437740 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.437795 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.437910 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.438004 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.438051 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd6tw\" (UniqueName: \"kubernetes.io/projected/52510c93-a879-4c02-94b8-9142a9451a26-kube-api-access-dd6tw\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.442301 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.442301 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-agent-neutron-config-0\") pod 
\"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.443146 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.446549 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.453182 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.462929 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd6tw\" (UniqueName: \"kubernetes.io/projected/52510c93-a879-4c02-94b8-9142a9451a26-kube-api-access-dd6tw\") pod \"neutron-dhcp-openstack-openstack-cell1-4t74k\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.524336 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:00:43 crc kubenswrapper[4863]: I1205 09:00:43.602744 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 09:00:43 crc kubenswrapper[4863]: E1205 09:00:43.603038 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:00:44 crc kubenswrapper[4863]: I1205 09:00:44.093782 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-4t74k"] Dec 05 09:00:44 crc kubenswrapper[4863]: I1205 09:00:44.101706 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:00:45 crc kubenswrapper[4863]: I1205 09:00:45.113973 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" event={"ID":"52510c93-a879-4c02-94b8-9142a9451a26","Type":"ContainerStarted","Data":"2d655e59fc3e8c3b15582563b8cf116518099abb23d0c18f2d19f813036541a4"} Dec 05 09:00:45 crc kubenswrapper[4863]: I1205 09:00:45.114605 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" event={"ID":"52510c93-a879-4c02-94b8-9142a9451a26","Type":"ContainerStarted","Data":"59047b343f6b92dc747c00abacaf8b6a11aa5fefe183c2f7f731407f9b1cbd42"} Dec 05 09:00:45 crc kubenswrapper[4863]: I1205 09:00:45.135888 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" podStartSLOduration=1.605421942 podStartE2EDuration="2.135869001s" podCreationTimestamp="2025-12-05 09:00:43 +0000 UTC" firstStartedPulling="2025-12-05 09:00:44.101487869 +0000 UTC m=+8071.827484909" lastFinishedPulling="2025-12-05 09:00:44.631934928 +0000 UTC m=+8072.357931968" observedRunningTime="2025-12-05 09:00:45.130778608 +0000 UTC m=+8072.856775648" watchObservedRunningTime="2025-12-05 09:00:45.135869001 +0000 UTC m=+8072.861866051" Dec 05 09:00:56 crc kubenswrapper[4863]: I1205 09:00:56.603305 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 09:00:56 crc kubenswrapper[4863]: E1205 09:00:56.604288 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.145998 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415421-vl2nr"] Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.147938 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.159677 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415421-vl2nr"] Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.188288 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-combined-ca-bundle\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.188751 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-fernet-keys\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.189128 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-config-data\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.189272 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqr9c\" (UniqueName: \"kubernetes.io/projected/d9c7f0b6-16f9-4aed-82cc-df709176bed3-kube-api-access-zqr9c\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.291311 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-fernet-keys\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.291492 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-config-data\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.291544 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqr9c\" (UniqueName: \"kubernetes.io/projected/d9c7f0b6-16f9-4aed-82cc-df709176bed3-kube-api-access-zqr9c\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.291632 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-combined-ca-bundle\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.298458 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-combined-ca-bundle\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.299007 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-fernet-keys\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.309721 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqr9c\" (UniqueName: \"kubernetes.io/projected/d9c7f0b6-16f9-4aed-82cc-df709176bed3-kube-api-access-zqr9c\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.310557 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-config-data\") pod \"keystone-cron-29415421-vl2nr\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.482646 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:00 crc kubenswrapper[4863]: I1205 09:01:00.986095 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415421-vl2nr"] Dec 05 09:01:01 crc kubenswrapper[4863]: I1205 09:01:01.274247 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-vl2nr" event={"ID":"d9c7f0b6-16f9-4aed-82cc-df709176bed3","Type":"ContainerStarted","Data":"5a77729bf07f439d6d8583e691acca16d3ea8bb76348993f85fa0ca2dd8f8063"} Dec 05 09:01:01 crc kubenswrapper[4863]: I1205 09:01:01.274334 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-vl2nr" event={"ID":"d9c7f0b6-16f9-4aed-82cc-df709176bed3","Type":"ContainerStarted","Data":"2b048be5a54518ada9a0fd334635308db5cde7b8eaf45dec9f268cb058a7e4fc"} Dec 05 09:01:01 crc kubenswrapper[4863]: I1205 09:01:01.295424 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415421-vl2nr" podStartSLOduration=1.2954090329999999 podStartE2EDuration="1.295409033s" podCreationTimestamp="2025-12-05 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:01:01.293854215 +0000 UTC m=+8089.019851255" watchObservedRunningTime="2025-12-05 09:01:01.295409033 +0000 UTC m=+8089.021406073" Dec 05 09:01:04 crc kubenswrapper[4863]: I1205 09:01:04.303019 4863 generic.go:334] "Generic (PLEG): container finished" podID="d9c7f0b6-16f9-4aed-82cc-df709176bed3" containerID="5a77729bf07f439d6d8583e691acca16d3ea8bb76348993f85fa0ca2dd8f8063" exitCode=0 Dec 05 09:01:04 crc kubenswrapper[4863]: I1205 09:01:04.303118 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-vl2nr" event={"ID":"d9c7f0b6-16f9-4aed-82cc-df709176bed3","Type":"ContainerDied","Data":"5a77729bf07f439d6d8583e691acca16d3ea8bb76348993f85fa0ca2dd8f8063"} Dec 05 09:01:05 crc 
kubenswrapper[4863]: I1205 09:01:05.680732 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.719057 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-combined-ca-bundle\") pod \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.719200 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-fernet-keys\") pod \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.719276 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-config-data\") pod \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.719331 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqr9c\" (UniqueName: \"kubernetes.io/projected/d9c7f0b6-16f9-4aed-82cc-df709176bed3-kube-api-access-zqr9c\") pod \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\" (UID: \"d9c7f0b6-16f9-4aed-82cc-df709176bed3\") " Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.726857 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d9c7f0b6-16f9-4aed-82cc-df709176bed3" (UID: "d9c7f0b6-16f9-4aed-82cc-df709176bed3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.726877 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9c7f0b6-16f9-4aed-82cc-df709176bed3-kube-api-access-zqr9c" (OuterVolumeSpecName: "kube-api-access-zqr9c") pod "d9c7f0b6-16f9-4aed-82cc-df709176bed3" (UID: "d9c7f0b6-16f9-4aed-82cc-df709176bed3"). InnerVolumeSpecName "kube-api-access-zqr9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.757644 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9c7f0b6-16f9-4aed-82cc-df709176bed3" (UID: "d9c7f0b6-16f9-4aed-82cc-df709176bed3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.786186 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-config-data" (OuterVolumeSpecName: "config-data") pod "d9c7f0b6-16f9-4aed-82cc-df709176bed3" (UID: "d9c7f0b6-16f9-4aed-82cc-df709176bed3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.822396 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.822434 4863 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.822446 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c7f0b6-16f9-4aed-82cc-df709176bed3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:05 crc kubenswrapper[4863]: I1205 09:01:05.822457 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqr9c\" (UniqueName: \"kubernetes.io/projected/d9c7f0b6-16f9-4aed-82cc-df709176bed3-kube-api-access-zqr9c\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:06 crc kubenswrapper[4863]: I1205 09:01:06.324853 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-vl2nr" event={"ID":"d9c7f0b6-16f9-4aed-82cc-df709176bed3","Type":"ContainerDied","Data":"2b048be5a54518ada9a0fd334635308db5cde7b8eaf45dec9f268cb058a7e4fc"} Dec 05 09:01:06 crc kubenswrapper[4863]: I1205 09:01:06.324892 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b048be5a54518ada9a0fd334635308db5cde7b8eaf45dec9f268cb058a7e4fc" Dec 05 09:01:06 crc kubenswrapper[4863]: I1205 09:01:06.324945 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415421-vl2nr" Dec 05 09:01:08 crc kubenswrapper[4863]: I1205 09:01:08.602040 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 09:01:09 crc kubenswrapper[4863]: I1205 09:01:09.355987 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"982c42481ffddae0a7f94c887784f3c9bac1911bad01393af21e1d462a575bf1"} Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.469616 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x5pff"] Dec 05 09:03:01 crc kubenswrapper[4863]: E1205 09:03:01.472008 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9c7f0b6-16f9-4aed-82cc-df709176bed3" containerName="keystone-cron" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.472143 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9c7f0b6-16f9-4aed-82cc-df709176bed3" containerName="keystone-cron" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.472528 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9c7f0b6-16f9-4aed-82cc-df709176bed3" containerName="keystone-cron" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.474667 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.483220 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x5pff"] Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.587891 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-catalog-content\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.588281 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vf6z\" (UniqueName: \"kubernetes.io/projected/c5acfa72-1f18-4a36-8d60-268fc1704ad6-kube-api-access-9vf6z\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.588343 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-utilities\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.690455 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-utilities\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.690712 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-catalog-content\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.690755 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vf6z\" (UniqueName: \"kubernetes.io/projected/c5acfa72-1f18-4a36-8d60-268fc1704ad6-kube-api-access-9vf6z\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.691010 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-utilities\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.691401 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-catalog-content\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.711125 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9vf6z\" (UniqueName: \"kubernetes.io/projected/c5acfa72-1f18-4a36-8d60-268fc1704ad6-kube-api-access-9vf6z\") pod \"redhat-operators-x5pff\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:01 crc kubenswrapper[4863]: I1205 09:03:01.808089 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:02 crc kubenswrapper[4863]: W1205 09:03:02.457814 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5acfa72_1f18_4a36_8d60_268fc1704ad6.slice/crio-73b52a93e4151f30a6347c96a5087f3eafb4c3fb32c6322d3b2c8a657052d905 WatchSource:0}: Error finding container 73b52a93e4151f30a6347c96a5087f3eafb4c3fb32c6322d3b2c8a657052d905: Status 404 returned error can't find the container with id 73b52a93e4151f30a6347c96a5087f3eafb4c3fb32c6322d3b2c8a657052d905 Dec 05 09:03:02 crc kubenswrapper[4863]: I1205 09:03:02.459079 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x5pff"] Dec 05 09:03:02 crc kubenswrapper[4863]: I1205 09:03:02.512258 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x5pff" event={"ID":"c5acfa72-1f18-4a36-8d60-268fc1704ad6","Type":"ContainerStarted","Data":"73b52a93e4151f30a6347c96a5087f3eafb4c3fb32c6322d3b2c8a657052d905"} Dec 05 09:03:03 crc kubenswrapper[4863]: I1205 09:03:03.523274 4863 generic.go:334] "Generic (PLEG): container finished" podID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerID="b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840" exitCode=0 Dec 05 09:03:03 crc kubenswrapper[4863]: I1205 09:03:03.523690 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x5pff" event={"ID":"c5acfa72-1f18-4a36-8d60-268fc1704ad6","Type":"ContainerDied","Data":"b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840"} Dec 05 09:03:04 crc kubenswrapper[4863]: I1205 09:03:04.538369 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x5pff" event={"ID":"c5acfa72-1f18-4a36-8d60-268fc1704ad6","Type":"ContainerStarted","Data":"2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63"} Dec 05 09:03:07 crc kubenswrapper[4863]: I1205 09:03:07.565742 4863 generic.go:334] "Generic (PLEG): container finished" podID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerID="2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63" exitCode=0 Dec 05 09:03:07 crc kubenswrapper[4863]: I1205 09:03:07.565838 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x5pff" event={"ID":"c5acfa72-1f18-4a36-8d60-268fc1704ad6","Type":"ContainerDied","Data":"2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63"} Dec 05 09:03:08 crc kubenswrapper[4863]: I1205 09:03:08.463838 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:03:08 crc kubenswrapper[4863]: I1205 09:03:08.464744 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:03:08 crc kubenswrapper[4863]: I1205 09:03:08.579435 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x5pff" event={"ID":"c5acfa72-1f18-4a36-8d60-268fc1704ad6","Type":"ContainerStarted","Data":"f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4"} Dec 05 09:03:08 crc kubenswrapper[4863]: I1205 09:03:08.605739 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x5pff" podStartSLOduration=2.892095641 podStartE2EDuration="7.605721452s" podCreationTimestamp="2025-12-05 09:03:01 +0000 UTC" firstStartedPulling="2025-12-05 09:03:03.526644065 +0000 UTC m=+8211.252641105" lastFinishedPulling="2025-12-05 09:03:08.240269876 +0000 UTC m=+8215.966266916" observedRunningTime="2025-12-05 09:03:08.599359647 +0000 UTC m=+8216.325356697" watchObservedRunningTime="2025-12-05 09:03:08.605721452 +0000 UTC m=+8216.331718492" Dec 05 09:03:11 crc kubenswrapper[4863]: I1205 09:03:11.809022 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:11 crc kubenswrapper[4863]: I1205 09:03:11.809354 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:12 crc kubenswrapper[4863]: I1205 09:03:12.862559 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x5pff" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="registry-server" probeResult="failure" output=< Dec 05 09:03:12 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 09:03:12 crc kubenswrapper[4863]: > Dec 05 09:03:21 crc kubenswrapper[4863]: I1205 09:03:21.864264 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:21 crc kubenswrapper[4863]: I1205 09:03:21.915892 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:22 crc kubenswrapper[4863]: I1205 09:03:22.100114 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x5pff"] Dec 05 09:03:23 crc kubenswrapper[4863]: I1205 09:03:23.729580 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x5pff" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="registry-server" containerID="cri-o://f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4" gracePeriod=2 Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.256071 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.368352 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-utilities\") pod \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.368402 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-catalog-content\") pod \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.368495 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vf6z\" (UniqueName: \"kubernetes.io/projected/c5acfa72-1f18-4a36-8d60-268fc1704ad6-kube-api-access-9vf6z\") pod \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\" (UID: \"c5acfa72-1f18-4a36-8d60-268fc1704ad6\") " Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.369360 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-utilities" (OuterVolumeSpecName: "utilities") pod "c5acfa72-1f18-4a36-8d60-268fc1704ad6" (UID: "c5acfa72-1f18-4a36-8d60-268fc1704ad6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.374170 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5acfa72-1f18-4a36-8d60-268fc1704ad6-kube-api-access-9vf6z" (OuterVolumeSpecName: "kube-api-access-9vf6z") pod "c5acfa72-1f18-4a36-8d60-268fc1704ad6" (UID: "c5acfa72-1f18-4a36-8d60-268fc1704ad6"). InnerVolumeSpecName "kube-api-access-9vf6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.471324 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vf6z\" (UniqueName: \"kubernetes.io/projected/c5acfa72-1f18-4a36-8d60-268fc1704ad6-kube-api-access-9vf6z\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.471626 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.476744 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5acfa72-1f18-4a36-8d60-268fc1704ad6" (UID: "c5acfa72-1f18-4a36-8d60-268fc1704ad6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.574783 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5acfa72-1f18-4a36-8d60-268fc1704ad6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.739240 4863 generic.go:334] "Generic (PLEG): container finished" podID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerID="f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4" exitCode=0 Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.739279 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x5pff" event={"ID":"c5acfa72-1f18-4a36-8d60-268fc1704ad6","Type":"ContainerDied","Data":"f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4"} Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.739304 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x5pff" event={"ID":"c5acfa72-1f18-4a36-8d60-268fc1704ad6","Type":"ContainerDied","Data":"73b52a93e4151f30a6347c96a5087f3eafb4c3fb32c6322d3b2c8a657052d905"} Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.739302 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x5pff" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.739315 4863 scope.go:117] "RemoveContainer" containerID="f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.766308 4863 scope.go:117] "RemoveContainer" containerID="2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.766510 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x5pff"] Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.778489 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x5pff"] Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.787498 4863 scope.go:117] "RemoveContainer" containerID="b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.851917 4863 scope.go:117] "RemoveContainer" containerID="f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4" Dec 05 09:03:24 crc kubenswrapper[4863]: E1205 09:03:24.852500 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4\": container with ID starting with f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4 not found: ID does not exist" containerID="f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.852561 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4"} err="failed to get container status \"f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4\": rpc error: code = NotFound desc = could not find container \"f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4\": container with ID starting with f2bdb034823acc1c6d92793110303521d41afe31c77e0917658ac5d648d30ca4 not found: ID does not exist" Dec 05 09:03:24 crc 
kubenswrapper[4863]: I1205 09:03:24.852596 4863 scope.go:117] "RemoveContainer" containerID="2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63" Dec 05 09:03:24 crc kubenswrapper[4863]: E1205 09:03:24.853897 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63\": container with ID starting with 2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63 not found: ID does not exist" containerID="2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.853968 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63"} err="failed to get container status \"2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63\": rpc error: code = NotFound desc = could not find container \"2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63\": container with ID starting with 2120b2d480816ae2d09333a1f7fa8775f6e11beab8e1cab0118b6895848f6d63 not found: ID does not exist" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.854007 4863 scope.go:117] "RemoveContainer" containerID="b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840" Dec 05 09:03:24 crc kubenswrapper[4863]: E1205 09:03:24.854511 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840\": container with ID starting with b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840 not found: ID does not exist" containerID="b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840" Dec 05 09:03:24 crc kubenswrapper[4863]: I1205 09:03:24.854550 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840"} err="failed to get container status \"b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840\": rpc error: code = NotFound desc = could not find container \"b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840\": container with ID starting with b985f8ef2d7dbc4deb6e10244af4fdea13ebb9defbb067a9878acec945320840 not found: ID does not exist" Dec 05 09:03:26 crc kubenswrapper[4863]: I1205 09:03:26.632048 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" path="/var/lib/kubelet/pods/c5acfa72-1f18-4a36-8d60-268fc1704ad6/volumes" Dec 05 09:03:38 crc kubenswrapper[4863]: I1205 09:03:38.464341 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:03:38 crc kubenswrapper[4863]: I1205 09:03:38.464962 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:04:08 crc kubenswrapper[4863]: I1205 09:04:08.463903 4863 patch_prober.go:28] interesting 
pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:04:08 crc kubenswrapper[4863]: I1205 09:04:08.464574 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:04:08 crc kubenswrapper[4863]: I1205 09:04:08.464636 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 09:04:08 crc kubenswrapper[4863]: I1205 09:04:08.465625 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"982c42481ffddae0a7f94c887784f3c9bac1911bad01393af21e1d462a575bf1"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:04:08 crc kubenswrapper[4863]: I1205 09:04:08.465732 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://982c42481ffddae0a7f94c887784f3c9bac1911bad01393af21e1d462a575bf1" gracePeriod=600 Dec 05 09:04:09 crc kubenswrapper[4863]: I1205 09:04:09.166096 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="982c42481ffddae0a7f94c887784f3c9bac1911bad01393af21e1d462a575bf1" exitCode=0 Dec 05 09:04:09 crc kubenswrapper[4863]: I1205 09:04:09.166211 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"982c42481ffddae0a7f94c887784f3c9bac1911bad01393af21e1d462a575bf1"} Dec 05 09:04:09 crc kubenswrapper[4863]: I1205 09:04:09.167039 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6"} Dec 05 09:04:09 crc kubenswrapper[4863]: I1205 09:04:09.167074 4863 scope.go:117] "RemoveContainer" containerID="ad4b9a4628bb3645bc8d336c1004452de60cb0d4ae01fce70732d5c710d9e0cb" Dec 05 09:04:46 crc kubenswrapper[4863]: I1205 09:04:46.515740 4863 generic.go:334] "Generic (PLEG): container finished" podID="52510c93-a879-4c02-94b8-9142a9451a26" containerID="2d655e59fc3e8c3b15582563b8cf116518099abb23d0c18f2d19f813036541a4" exitCode=0 Dec 05 09:04:46 crc kubenswrapper[4863]: I1205 09:04:46.515840 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" event={"ID":"52510c93-a879-4c02-94b8-9142a9451a26","Type":"ContainerDied","Data":"2d655e59fc3e8c3b15582563b8cf116518099abb23d0c18f2d19f813036541a4"} Dec 05 09:04:47 crc kubenswrapper[4863]: I1205 09:04:47.992008 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.054608 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ceph\") pod \"52510c93-a879-4c02-94b8-9142a9451a26\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.055034 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-inventory\") pod \"52510c93-a879-4c02-94b8-9142a9451a26\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.055324 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-combined-ca-bundle\") pod \"52510c93-a879-4c02-94b8-9142a9451a26\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.055438 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dd6tw\" (UniqueName: \"kubernetes.io/projected/52510c93-a879-4c02-94b8-9142a9451a26-kube-api-access-dd6tw\") pod \"52510c93-a879-4c02-94b8-9142a9451a26\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.055570 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ssh-key\") pod \"52510c93-a879-4c02-94b8-9142a9451a26\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.055680 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-agent-neutron-config-0\") pod \"52510c93-a879-4c02-94b8-9142a9451a26\" (UID: \"52510c93-a879-4c02-94b8-9142a9451a26\") " Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.060133 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "52510c93-a879-4c02-94b8-9142a9451a26" (UID: "52510c93-a879-4c02-94b8-9142a9451a26"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.060637 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ceph" (OuterVolumeSpecName: "ceph") pod "52510c93-a879-4c02-94b8-9142a9451a26" (UID: "52510c93-a879-4c02-94b8-9142a9451a26"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.061520 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52510c93-a879-4c02-94b8-9142a9451a26-kube-api-access-dd6tw" (OuterVolumeSpecName: "kube-api-access-dd6tw") pod "52510c93-a879-4c02-94b8-9142a9451a26" (UID: "52510c93-a879-4c02-94b8-9142a9451a26"). InnerVolumeSpecName "kube-api-access-dd6tw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.084551 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "52510c93-a879-4c02-94b8-9142a9451a26" (UID: "52510c93-a879-4c02-94b8-9142a9451a26"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.086005 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-inventory" (OuterVolumeSpecName: "inventory") pod "52510c93-a879-4c02-94b8-9142a9451a26" (UID: "52510c93-a879-4c02-94b8-9142a9451a26"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.087941 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "52510c93-a879-4c02-94b8-9142a9451a26" (UID: "52510c93-a879-4c02-94b8-9142a9451a26"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.158129 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.158167 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.158182 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dd6tw\" (UniqueName: \"kubernetes.io/projected/52510c93-a879-4c02-94b8-9142a9451a26-kube-api-access-dd6tw\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.158196 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.158209 4863 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.158220 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/52510c93-a879-4c02-94b8-9142a9451a26-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.539906 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" event={"ID":"52510c93-a879-4c02-94b8-9142a9451a26","Type":"ContainerDied","Data":"59047b343f6b92dc747c00abacaf8b6a11aa5fefe183c2f7f731407f9b1cbd42"} Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.539948 4863 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="59047b343f6b92dc747c00abacaf8b6a11aa5fefe183c2f7f731407f9b1cbd42" Dec 05 09:04:48 crc kubenswrapper[4863]: I1205 09:04:48.539985 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-4t74k" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.985492 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cmtjn"] Dec 05 09:04:54 crc kubenswrapper[4863]: E1205 09:04:54.986385 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="extract-utilities" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.986397 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="extract-utilities" Dec 05 09:04:54 crc kubenswrapper[4863]: E1205 09:04:54.986409 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="registry-server" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.986415 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="registry-server" Dec 05 09:04:54 crc kubenswrapper[4863]: E1205 09:04:54.986433 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52510c93-a879-4c02-94b8-9142a9451a26" containerName="neutron-dhcp-openstack-openstack-cell1" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.986439 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="52510c93-a879-4c02-94b8-9142a9451a26" containerName="neutron-dhcp-openstack-openstack-cell1" Dec 05 09:04:54 crc kubenswrapper[4863]: E1205 09:04:54.986463 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="extract-content" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.990720 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="extract-content" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.991076 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5acfa72-1f18-4a36-8d60-268fc1704ad6" containerName="registry-server" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.991092 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="52510c93-a879-4c02-94b8-9142a9451a26" containerName="neutron-dhcp-openstack-openstack-cell1" Dec 05 09:04:54 crc kubenswrapper[4863]: I1205 09:04:54.992584 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.009684 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cmtjn"] Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.098720 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-catalog-content\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.099065 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m58c\" (UniqueName: \"kubernetes.io/projected/1dfca984-4354-4267-a97e-263c31282091-kube-api-access-2m58c\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.099426 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-utilities\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.201888 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-catalog-content\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.202052 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m58c\" (UniqueName: \"kubernetes.io/projected/1dfca984-4354-4267-a97e-263c31282091-kube-api-access-2m58c\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.202203 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-utilities\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.202854 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-utilities\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.202854 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-catalog-content\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.220660 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2m58c\" (UniqueName: \"kubernetes.io/projected/1dfca984-4354-4267-a97e-263c31282091-kube-api-access-2m58c\") pod \"certified-operators-cmtjn\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.327000 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:04:55 crc kubenswrapper[4863]: I1205 09:04:55.751995 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cmtjn"] Dec 05 09:04:56 crc kubenswrapper[4863]: I1205 09:04:56.619805 4863 generic.go:334] "Generic (PLEG): container finished" podID="1dfca984-4354-4267-a97e-263c31282091" containerID="8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650" exitCode=0 Dec 05 09:04:56 crc kubenswrapper[4863]: I1205 09:04:56.619850 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmtjn" event={"ID":"1dfca984-4354-4267-a97e-263c31282091","Type":"ContainerDied","Data":"8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650"} Dec 05 09:04:56 crc kubenswrapper[4863]: I1205 09:04:56.620355 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmtjn" event={"ID":"1dfca984-4354-4267-a97e-263c31282091","Type":"ContainerStarted","Data":"acd25bb87ba3c8e1f22c6f5e786ec8120e846a52662f5e4abf5c2f19d32e5d79"} Dec 05 09:04:58 crc kubenswrapper[4863]: I1205 09:04:58.640650 4863 generic.go:334] "Generic (PLEG): container finished" podID="1dfca984-4354-4267-a97e-263c31282091" containerID="0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1" exitCode=0 Dec 05 09:04:58 crc kubenswrapper[4863]: I1205 09:04:58.640760 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmtjn" event={"ID":"1dfca984-4354-4267-a97e-263c31282091","Type":"ContainerDied","Data":"0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1"} Dec 05 09:04:59 crc kubenswrapper[4863]: I1205 09:04:59.656027 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmtjn" event={"ID":"1dfca984-4354-4267-a97e-263c31282091","Type":"ContainerStarted","Data":"80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c"} Dec 05 09:04:59 crc kubenswrapper[4863]: I1205 09:04:59.690043 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cmtjn" podStartSLOduration=2.98418423 podStartE2EDuration="5.69001982s" podCreationTimestamp="2025-12-05 09:04:54 +0000 UTC" firstStartedPulling="2025-12-05 09:04:56.623924082 +0000 UTC m=+8324.349921122" lastFinishedPulling="2025-12-05 09:04:59.329759672 +0000 UTC m=+8327.055756712" observedRunningTime="2025-12-05 09:04:59.674298526 +0000 UTC m=+8327.400295576" watchObservedRunningTime="2025-12-05 09:04:59.69001982 +0000 UTC m=+8327.416016870" Dec 05 09:05:05 crc kubenswrapper[4863]: I1205 09:05:05.328236 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:05:05 crc kubenswrapper[4863]: I1205 09:05:05.328874 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:05:05 crc kubenswrapper[4863]: I1205 09:05:05.391340 4863 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:05:05 crc kubenswrapper[4863]: I1205 09:05:05.774954 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:05:05 crc kubenswrapper[4863]: I1205 09:05:05.829574 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cmtjn"] Dec 05 09:05:07 crc kubenswrapper[4863]: I1205 09:05:07.740740 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cmtjn" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="registry-server" containerID="cri-o://80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c" gracePeriod=2 Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.230920 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.378367 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m58c\" (UniqueName: \"kubernetes.io/projected/1dfca984-4354-4267-a97e-263c31282091-kube-api-access-2m58c\") pod \"1dfca984-4354-4267-a97e-263c31282091\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.378597 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-utilities\") pod \"1dfca984-4354-4267-a97e-263c31282091\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.378817 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-catalog-content\") pod \"1dfca984-4354-4267-a97e-263c31282091\" (UID: \"1dfca984-4354-4267-a97e-263c31282091\") " Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.379537 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-utilities" (OuterVolumeSpecName: "utilities") pod "1dfca984-4354-4267-a97e-263c31282091" (UID: "1dfca984-4354-4267-a97e-263c31282091"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.383299 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dfca984-4354-4267-a97e-263c31282091-kube-api-access-2m58c" (OuterVolumeSpecName: "kube-api-access-2m58c") pod "1dfca984-4354-4267-a97e-263c31282091" (UID: "1dfca984-4354-4267-a97e-263c31282091"). InnerVolumeSpecName "kube-api-access-2m58c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.480539 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m58c\" (UniqueName: \"kubernetes.io/projected/1dfca984-4354-4267-a97e-263c31282091-kube-api-access-2m58c\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.480571 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.696990 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1dfca984-4354-4267-a97e-263c31282091" (UID: "1dfca984-4354-4267-a97e-263c31282091"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.754604 4863 generic.go:334] "Generic (PLEG): container finished" podID="1dfca984-4354-4267-a97e-263c31282091" containerID="80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c" exitCode=0 Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.754653 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmtjn" event={"ID":"1dfca984-4354-4267-a97e-263c31282091","Type":"ContainerDied","Data":"80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c"} Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.754678 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmtjn" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.754703 4863 scope.go:117] "RemoveContainer" containerID="80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.754686 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmtjn" event={"ID":"1dfca984-4354-4267-a97e-263c31282091","Type":"ContainerDied","Data":"acd25bb87ba3c8e1f22c6f5e786ec8120e846a52662f5e4abf5c2f19d32e5d79"} Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.785527 4863 scope.go:117] "RemoveContainer" containerID="0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.789586 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dfca984-4354-4267-a97e-263c31282091-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.805161 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cmtjn"] Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.814743 4863 scope.go:117] "RemoveContainer" containerID="8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.818240 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cmtjn"] Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.876230 4863 scope.go:117] "RemoveContainer" containerID="80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c" Dec 05 09:05:08 crc kubenswrapper[4863]: E1205 09:05:08.876801 4863 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c\": container with ID starting with 80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c not found: ID does not exist" containerID="80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.876878 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c"} err="failed to get container status \"80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c\": rpc error: code = NotFound desc = could not find container \"80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c\": container with ID starting with 80f17d869280308cbe3d7cb6fce39cf0ae337ceb90f04247fe139701f213e11c not found: ID does not exist" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.876919 4863 scope.go:117] "RemoveContainer" containerID="0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1" Dec 05 09:05:08 crc kubenswrapper[4863]: E1205 09:05:08.877546 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1\": container with ID starting with 0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1 not found: ID does not exist" containerID="0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.877573 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1"} err="failed to get container status \"0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1\": rpc error: code = NotFound desc = could not find container \"0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1\": container with ID starting with 0062099411ec28dae977d73688c06d0bd827c4a7deb79766eddac89434076bc1 not found: ID does not exist" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.877596 4863 scope.go:117] "RemoveContainer" containerID="8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650" Dec 05 09:05:08 crc kubenswrapper[4863]: E1205 09:05:08.877969 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650\": container with ID starting with 8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650 not found: ID does not exist" containerID="8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650" Dec 05 09:05:08 crc kubenswrapper[4863]: I1205 09:05:08.878019 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650"} err="failed to get container status \"8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650\": rpc error: code = NotFound desc = could not find container \"8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650\": container with ID starting with 8191eaf1a1a0a0ff8ef50937f61c2a95c1258621d5f88c3576e34b92809b9650 not found: ID does not exist" Dec 05 09:05:10 crc kubenswrapper[4863]: I1205 09:05:10.620136 4863 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="1dfca984-4354-4267-a97e-263c31282091" path="/var/lib/kubelet/pods/1dfca984-4354-4267-a97e-263c31282091/volumes" Dec 05 09:05:12 crc kubenswrapper[4863]: I1205 09:05:12.865623 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 09:05:12 crc kubenswrapper[4863]: I1205 09:05:12.866392 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="e3746ab9-9512-4d76-94fe-312dd6679c25" containerName="nova-cell0-conductor-conductor" containerID="cri-o://3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55" gracePeriod=30 Dec 05 09:05:12 crc kubenswrapper[4863]: I1205 09:05:12.890380 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 09:05:12 crc kubenswrapper[4863]: I1205 09:05:12.890865 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="c8263033-2370-4ebb-9eef-74f211520a1a" containerName="nova-cell1-conductor-conductor" containerID="cri-o://d35e0e4f743899c93c75be36ac6fc6e73301a084d29f11841c08ca72ed79cdc3" gracePeriod=30 Dec 05 09:05:13 crc kubenswrapper[4863]: E1205 09:05:13.597797 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 09:05:13 crc kubenswrapper[4863]: E1205 09:05:13.600067 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 09:05:13 crc kubenswrapper[4863]: E1205 09:05:13.601308 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 09:05:13 crc kubenswrapper[4863]: E1205 09:05:13.601343 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="e3746ab9-9512-4d76-94fe-312dd6679c25" containerName="nova-cell0-conductor-conductor" Dec 05 09:05:13 crc kubenswrapper[4863]: I1205 09:05:13.809702 4863 generic.go:334] "Generic (PLEG): container finished" podID="c8263033-2370-4ebb-9eef-74f211520a1a" containerID="d35e0e4f743899c93c75be36ac6fc6e73301a084d29f11841c08ca72ed79cdc3" exitCode=0 Dec 05 09:05:13 crc kubenswrapper[4863]: I1205 09:05:13.809952 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c8263033-2370-4ebb-9eef-74f211520a1a","Type":"ContainerDied","Data":"d35e0e4f743899c93c75be36ac6fc6e73301a084d29f11841c08ca72ed79cdc3"} Dec 05 09:05:13 crc kubenswrapper[4863]: I1205 09:05:13.937264 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:13 crc kubenswrapper[4863]: I1205 09:05:13.957449 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 09:05:13 crc kubenswrapper[4863]: I1205 09:05:13.957738 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-api" containerID="cri-o://4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6" gracePeriod=30 Dec 05 09:05:13 crc kubenswrapper[4863]: I1205 09:05:13.957702 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-log" containerID="cri-o://4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275" gracePeriod=30 Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.008714 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.008952 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5d153922-7ce3-4480-82d8-c02ae2163538" containerName="nova-scheduler-scheduler" containerID="cri-o://5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" gracePeriod=30 Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.040099 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.040375 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-log" containerID="cri-o://5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597" gracePeriod=30 Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.040806 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-metadata" containerID="cri-o://3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c" gracePeriod=30 Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.102925 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-combined-ca-bundle\") pod \"c8263033-2370-4ebb-9eef-74f211520a1a\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.103189 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-config-data\") pod \"c8263033-2370-4ebb-9eef-74f211520a1a\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.103282 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6bzl\" (UniqueName: \"kubernetes.io/projected/c8263033-2370-4ebb-9eef-74f211520a1a-kube-api-access-l6bzl\") pod \"c8263033-2370-4ebb-9eef-74f211520a1a\" (UID: \"c8263033-2370-4ebb-9eef-74f211520a1a\") " Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.110683 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/c8263033-2370-4ebb-9eef-74f211520a1a-kube-api-access-l6bzl" (OuterVolumeSpecName: "kube-api-access-l6bzl") pod "c8263033-2370-4ebb-9eef-74f211520a1a" (UID: "c8263033-2370-4ebb-9eef-74f211520a1a"). InnerVolumeSpecName "kube-api-access-l6bzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.138408 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8263033-2370-4ebb-9eef-74f211520a1a" (UID: "c8263033-2370-4ebb-9eef-74f211520a1a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.157207 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-config-data" (OuterVolumeSpecName: "config-data") pod "c8263033-2370-4ebb-9eef-74f211520a1a" (UID: "c8263033-2370-4ebb-9eef-74f211520a1a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.205309 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.205563 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8263033-2370-4ebb-9eef-74f211520a1a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.205633 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6bzl\" (UniqueName: \"kubernetes.io/projected/c8263033-2370-4ebb-9eef-74f211520a1a-kube-api-access-l6bzl\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.822210 4863 generic.go:334] "Generic (PLEG): container finished" podID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerID="5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597" exitCode=143 Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.822302 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4762ada2-37ce-414a-91a6-c7f2ebeef46c","Type":"ContainerDied","Data":"5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597"} Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.826083 4863 generic.go:334] "Generic (PLEG): container finished" podID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerID="4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275" exitCode=143 Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.826165 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c66091e0-5ab3-4217-9855-8b8be3129d9e","Type":"ContainerDied","Data":"4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275"} Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.828584 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c8263033-2370-4ebb-9eef-74f211520a1a","Type":"ContainerDied","Data":"54d1ef8ff1737ade0aeec39a63977a5f9a57a352c6155765d295a941f8d0d37e"} Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.828639 4863 scope.go:117] "RemoveContainer" 
containerID="d35e0e4f743899c93c75be36ac6fc6e73301a084d29f11841c08ca72ed79cdc3" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.828691 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.853418 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.864226 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.881211 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 09:05:14 crc kubenswrapper[4863]: E1205 09:05:14.881749 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="extract-content" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.881771 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="extract-content" Dec 05 09:05:14 crc kubenswrapper[4863]: E1205 09:05:14.881811 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="extract-utilities" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.881820 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="extract-utilities" Dec 05 09:05:14 crc kubenswrapper[4863]: E1205 09:05:14.881829 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="registry-server" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.881838 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="registry-server" Dec 05 09:05:14 crc kubenswrapper[4863]: E1205 09:05:14.881866 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8263033-2370-4ebb-9eef-74f211520a1a" containerName="nova-cell1-conductor-conductor" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.881874 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8263033-2370-4ebb-9eef-74f211520a1a" containerName="nova-cell1-conductor-conductor" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.882124 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dfca984-4354-4267-a97e-263c31282091" containerName="registry-server" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.882157 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8263033-2370-4ebb-9eef-74f211520a1a" containerName="nova-cell1-conductor-conductor" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.883127 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.885920 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 09:05:14 crc kubenswrapper[4863]: I1205 09:05:14.892853 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.021455 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.022104 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b2cc\" (UniqueName: \"kubernetes.io/projected/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-kube-api-access-5b2cc\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.022311 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: E1205 09:05:15.067272 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 09:05:15 crc kubenswrapper[4863]: E1205 09:05:15.068778 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 09:05:15 crc kubenswrapper[4863]: E1205 09:05:15.073817 4863 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 09:05:15 crc kubenswrapper[4863]: E1205 09:05:15.073876 4863 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="5d153922-7ce3-4480-82d8-c02ae2163538" containerName="nova-scheduler-scheduler" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.124560 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b2cc\" (UniqueName: \"kubernetes.io/projected/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-kube-api-access-5b2cc\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 
09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.124642 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.124690 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.129373 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.129507 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.143117 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b2cc\" (UniqueName: \"kubernetes.io/projected/8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a-kube-api-access-5b2cc\") pod \"nova-cell1-conductor-0\" (UID: \"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a\") " pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.204008 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.724162 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 09:05:15 crc kubenswrapper[4863]: I1205 09:05:15.845087 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a","Type":"ContainerStarted","Data":"d8ae2ea9abc2f34f72579e9aec3530b28019bcdded34597cbd31f309b3062f67"} Dec 05 09:05:16 crc kubenswrapper[4863]: I1205 09:05:16.618783 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8263033-2370-4ebb-9eef-74f211520a1a" path="/var/lib/kubelet/pods/c8263033-2370-4ebb-9eef-74f211520a1a/volumes" Dec 05 09:05:16 crc kubenswrapper[4863]: I1205 09:05:16.858492 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a","Type":"ContainerStarted","Data":"2961fd6f928fdafa06fba72f09eff374ac03224452ce78c5df4894690a0ec1ea"} Dec 05 09:05:16 crc kubenswrapper[4863]: I1205 09:05:16.858639 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:16 crc kubenswrapper[4863]: I1205 09:05:16.876421 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.876399902 podStartE2EDuration="2.876399902s" podCreationTimestamp="2025-12-05 09:05:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:05:16.874363592 +0000 UTC m=+8344.600360652" watchObservedRunningTime="2025-12-05 09:05:16.876399902 +0000 UTC m=+8344.602396942" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.193040 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": read tcp 10.217.0.2:38350->10.217.1.77:8775: read: connection reset by peer" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.193869 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": read tcp 10.217.0.2:38364->10.217.1.77:8775: read: connection reset by peer" Dec 05 09:05:17 crc kubenswrapper[4863]: E1205 09:05:17.261221 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc66091e0_5ab3_4217_9855_8b8be3129d9e.slice/crio-conmon-4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc66091e0_5ab3_4217_9855_8b8be3129d9e.slice/crio-4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6.scope\": RecentStats: unable to find data in memory cache]" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.848623 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.854969 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.909101 4863 generic.go:334] "Generic (PLEG): container finished" podID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerID="4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6" exitCode=0 Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.909175 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c66091e0-5ab3-4217-9855-8b8be3129d9e","Type":"ContainerDied","Data":"4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6"} Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.909225 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c66091e0-5ab3-4217-9855-8b8be3129d9e","Type":"ContainerDied","Data":"ba6be816d67c2fb28d5f15fe7ce49a6cab78d773fa1c3f4c381f32eb198edc8c"} Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.909243 4863 scope.go:117] "RemoveContainer" containerID="4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.909366 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.911825 4863 generic.go:334] "Generic (PLEG): container finished" podID="e3746ab9-9512-4d76-94fe-312dd6679c25" containerID="3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55" exitCode=0 Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.911896 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e3746ab9-9512-4d76-94fe-312dd6679c25","Type":"ContainerDied","Data":"3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55"} Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.915263 4863 generic.go:334] "Generic (PLEG): container finished" podID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerID="3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c" exitCode=0 Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.915526 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.916126 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4762ada2-37ce-414a-91a6-c7f2ebeef46c","Type":"ContainerDied","Data":"3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c"} Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.916154 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4762ada2-37ce-414a-91a6-c7f2ebeef46c","Type":"ContainerDied","Data":"d23f27a46fb769341db852e83359d389d61867e7bd71c4838cdcc9f2fcd7dab7"} Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.945667 4863 scope.go:117] "RemoveContainer" containerID="4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.964038 4863 scope.go:117] "RemoveContainer" containerID="4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6" Dec 05 09:05:17 crc kubenswrapper[4863]: E1205 09:05:17.968763 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6\": container with ID starting with 4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6 not found: ID does not exist" containerID="4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.968800 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6"} err="failed to get container status \"4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6\": rpc error: code = NotFound desc = could not find container \"4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6\": container with ID starting with 4b9fd97a52c87e9169b1caabd465c365b7b836365ccc3f1de1a338cda45c2ee6 not found: ID does not exist" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.968821 4863 scope.go:117] "RemoveContainer" containerID="4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275" Dec 05 09:05:17 crc kubenswrapper[4863]: E1205 09:05:17.969381 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275\": container with ID starting with 4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275 not found: ID does not exist" containerID="4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.969430 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275"} err="failed to get container status \"4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275\": rpc error: code = NotFound desc = could not find container \"4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275\": container with ID starting with 4fb2b544e71b2d7bed46834280d32e0608b0db5ef9109eb3f9e66006f8a24275 not found: ID does not exist" Dec 05 09:05:17 crc kubenswrapper[4863]: I1205 09:05:17.969458 4863 scope.go:117] "RemoveContainer" containerID="3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.006655 4863 
scope.go:117] "RemoveContainer" containerID="5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.007157 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-config-data\") pod \"c66091e0-5ab3-4217-9855-8b8be3129d9e\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.007426 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsp22\" (UniqueName: \"kubernetes.io/projected/c66091e0-5ab3-4217-9855-8b8be3129d9e-kube-api-access-gsp22\") pod \"c66091e0-5ab3-4217-9855-8b8be3129d9e\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.007460 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wrxj\" (UniqueName: \"kubernetes.io/projected/4762ada2-37ce-414a-91a6-c7f2ebeef46c-kube-api-access-7wrxj\") pod \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.007875 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4762ada2-37ce-414a-91a6-c7f2ebeef46c-logs\") pod \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.007917 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-config-data\") pod \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.007950 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66091e0-5ab3-4217-9855-8b8be3129d9e-logs\") pod \"c66091e0-5ab3-4217-9855-8b8be3129d9e\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.007980 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-combined-ca-bundle\") pod \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\" (UID: \"4762ada2-37ce-414a-91a6-c7f2ebeef46c\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.008015 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-combined-ca-bundle\") pod \"c66091e0-5ab3-4217-9855-8b8be3129d9e\" (UID: \"c66091e0-5ab3-4217-9855-8b8be3129d9e\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.008858 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c66091e0-5ab3-4217-9855-8b8be3129d9e-logs" (OuterVolumeSpecName: "logs") pod "c66091e0-5ab3-4217-9855-8b8be3129d9e" (UID: "c66091e0-5ab3-4217-9855-8b8be3129d9e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.009243 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4762ada2-37ce-414a-91a6-c7f2ebeef46c-logs" (OuterVolumeSpecName: "logs") pod "4762ada2-37ce-414a-91a6-c7f2ebeef46c" (UID: "4762ada2-37ce-414a-91a6-c7f2ebeef46c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.018790 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4762ada2-37ce-414a-91a6-c7f2ebeef46c-kube-api-access-7wrxj" (OuterVolumeSpecName: "kube-api-access-7wrxj") pod "4762ada2-37ce-414a-91a6-c7f2ebeef46c" (UID: "4762ada2-37ce-414a-91a6-c7f2ebeef46c"). InnerVolumeSpecName "kube-api-access-7wrxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.018871 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c66091e0-5ab3-4217-9855-8b8be3129d9e-kube-api-access-gsp22" (OuterVolumeSpecName: "kube-api-access-gsp22") pod "c66091e0-5ab3-4217-9855-8b8be3129d9e" (UID: "c66091e0-5ab3-4217-9855-8b8be3129d9e"). InnerVolumeSpecName "kube-api-access-gsp22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.048271 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-config-data" (OuterVolumeSpecName: "config-data") pod "4762ada2-37ce-414a-91a6-c7f2ebeef46c" (UID: "4762ada2-37ce-414a-91a6-c7f2ebeef46c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.057202 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c66091e0-5ab3-4217-9855-8b8be3129d9e" (UID: "c66091e0-5ab3-4217-9855-8b8be3129d9e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.061348 4863 scope.go:117] "RemoveContainer" containerID="3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c" Dec 05 09:05:18 crc kubenswrapper[4863]: E1205 09:05:18.066675 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c\": container with ID starting with 3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c not found: ID does not exist" containerID="3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.066721 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c"} err="failed to get container status \"3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c\": rpc error: code = NotFound desc = could not find container \"3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c\": container with ID starting with 3f699c54a319e110599c15c2cf005ee646983905118f4be07d0139d1a5fa8b8c not found: ID does not exist" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.066752 4863 scope.go:117] "RemoveContainer" containerID="5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.066953 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-config-data" (OuterVolumeSpecName: "config-data") pod "c66091e0-5ab3-4217-9855-8b8be3129d9e" (UID: "c66091e0-5ab3-4217-9855-8b8be3129d9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: E1205 09:05:18.067307 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597\": container with ID starting with 5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597 not found: ID does not exist" containerID="5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.067331 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597"} err="failed to get container status \"5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597\": rpc error: code = NotFound desc = could not find container \"5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597\": container with ID starting with 5a23c2bae244d52b729e5e5331a5bf0b8c39736b861b661aa606d963b868c597 not found: ID does not exist" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.080520 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112565 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112609 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66091e0-5ab3-4217-9855-8b8be3129d9e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112621 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsp22\" (UniqueName: \"kubernetes.io/projected/c66091e0-5ab3-4217-9855-8b8be3129d9e-kube-api-access-gsp22\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112634 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wrxj\" (UniqueName: \"kubernetes.io/projected/4762ada2-37ce-414a-91a6-c7f2ebeef46c-kube-api-access-7wrxj\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112648 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4762ada2-37ce-414a-91a6-c7f2ebeef46c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112659 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112669 4863 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66091e0-5ab3-4217-9855-8b8be3129d9e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.112806 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4762ada2-37ce-414a-91a6-c7f2ebeef46c" (UID: "4762ada2-37ce-414a-91a6-c7f2ebeef46c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.122312 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d"] Dec 05 09:05:18 crc kubenswrapper[4863]: E1205 09:05:18.125166 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-log" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125207 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-log" Dec 05 09:05:18 crc kubenswrapper[4863]: E1205 09:05:18.125234 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-api" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125243 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-api" Dec 05 09:05:18 crc kubenswrapper[4863]: E1205 09:05:18.125266 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-metadata" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125274 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-metadata" Dec 05 09:05:18 crc kubenswrapper[4863]: E1205 09:05:18.125315 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-log" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125324 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-log" Dec 05 09:05:18 crc kubenswrapper[4863]: E1205 09:05:18.125368 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3746ab9-9512-4d76-94fe-312dd6679c25" containerName="nova-cell0-conductor-conductor" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125375 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3746ab9-9512-4d76-94fe-312dd6679c25" containerName="nova-cell0-conductor-conductor" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125744 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3746ab9-9512-4d76-94fe-312dd6679c25" containerName="nova-cell0-conductor-conductor" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125766 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-metadata" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125781 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-log" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125803 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" containerName="nova-metadata-log" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.125824 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" containerName="nova-api-api" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.127924 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.129745 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-4v48r" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.133176 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.133429 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.133614 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.133781 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.133991 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.138845 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.177239 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.213213 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-config-data\") pod \"e3746ab9-9512-4d76-94fe-312dd6679c25\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.213337 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7j69\" (UniqueName: \"kubernetes.io/projected/e3746ab9-9512-4d76-94fe-312dd6679c25-kube-api-access-w7j69\") pod \"e3746ab9-9512-4d76-94fe-312dd6679c25\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.213372 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-combined-ca-bundle\") pod \"e3746ab9-9512-4d76-94fe-312dd6679c25\" (UID: \"e3746ab9-9512-4d76-94fe-312dd6679c25\") " Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.214140 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4762ada2-37ce-414a-91a6-c7f2ebeef46c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.225030 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3746ab9-9512-4d76-94fe-312dd6679c25-kube-api-access-w7j69" (OuterVolumeSpecName: "kube-api-access-w7j69") pod "e3746ab9-9512-4d76-94fe-312dd6679c25" (UID: "e3746ab9-9512-4d76-94fe-312dd6679c25"). InnerVolumeSpecName "kube-api-access-w7j69". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.244440 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-config-data" (OuterVolumeSpecName: "config-data") pod "e3746ab9-9512-4d76-94fe-312dd6679c25" (UID: "e3746ab9-9512-4d76-94fe-312dd6679c25"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.263563 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3746ab9-9512-4d76-94fe-312dd6679c25" (UID: "e3746ab9-9512-4d76-94fe-312dd6679c25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316172 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316230 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316370 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316594 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316622 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316657 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-l92z9\" (UniqueName: \"kubernetes.io/projected/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-kube-api-access-l92z9\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316699 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316774 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316802 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316846 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.316995 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.317075 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.317096 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7j69\" (UniqueName: \"kubernetes.io/projected/e3746ab9-9512-4d76-94fe-312dd6679c25-kube-api-access-w7j69\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.317108 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3746ab9-9512-4d76-94fe-312dd6679c25-combined-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.362396 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.378407 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.392627 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.394641 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.398972 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.402896 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.413745 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.418795 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.418889 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.418950 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.418991 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.419102 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.419134 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.419169 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l92z9\" (UniqueName: \"kubernetes.io/projected/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-kube-api-access-l92z9\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.419206 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.419254 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.419288 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.419332 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.420712 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.423794 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: 
\"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.424509 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.425001 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.427011 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.429179 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.429603 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.433131 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.433308 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.433441 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.433542 4863 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.449144 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.453095 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l92z9\" (UniqueName: \"kubernetes.io/projected/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-kube-api-access-l92z9\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.453694 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.456241 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.457018 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.462186 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.521149 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68c101ff-556e-4b29-8b6c-1d38a8b51afe-logs\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.521314 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68c101ff-556e-4b29-8b6c-1d38a8b51afe-config-data\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.521389 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c101ff-556e-4b29-8b6c-1d38a8b51afe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.521437 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbfkv\" (UniqueName: \"kubernetes.io/projected/68c101ff-556e-4b29-8b6c-1d38a8b51afe-kube-api-access-xbfkv\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.617321 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4762ada2-37ce-414a-91a6-c7f2ebeef46c" path="/var/lib/kubelet/pods/4762ada2-37ce-414a-91a6-c7f2ebeef46c/volumes" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.618664 4863 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="c66091e0-5ab3-4217-9855-8b8be3129d9e" path="/var/lib/kubelet/pods/c66091e0-5ab3-4217-9855-8b8be3129d9e/volumes" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.624335 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn8r7\" (UniqueName: \"kubernetes.io/projected/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-kube-api-access-mn8r7\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.625210 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68c101ff-556e-4b29-8b6c-1d38a8b51afe-config-data\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.625256 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.625297 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-config-data\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.625367 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-logs\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.625394 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c101ff-556e-4b29-8b6c-1d38a8b51afe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.625449 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbfkv\" (UniqueName: \"kubernetes.io/projected/68c101ff-556e-4b29-8b6c-1d38a8b51afe-kube-api-access-xbfkv\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.625539 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68c101ff-556e-4b29-8b6c-1d38a8b51afe-logs\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.626142 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68c101ff-556e-4b29-8b6c-1d38a8b51afe-logs\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.632788 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/68c101ff-556e-4b29-8b6c-1d38a8b51afe-config-data\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.645236 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c101ff-556e-4b29-8b6c-1d38a8b51afe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.650541 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbfkv\" (UniqueName: \"kubernetes.io/projected/68c101ff-556e-4b29-8b6c-1d38a8b51afe-kube-api-access-xbfkv\") pod \"nova-api-0\" (UID: \"68c101ff-556e-4b29-8b6c-1d38a8b51afe\") " pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.723130 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.727679 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.728325 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-config-data\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.728401 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-logs\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.728576 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn8r7\" (UniqueName: \"kubernetes.io/projected/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-kube-api-access-mn8r7\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.729079 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-logs\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.730737 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.732282 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-config-data\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 
09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.747052 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn8r7\" (UniqueName: \"kubernetes.io/projected/c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34-kube-api-access-mn8r7\") pod \"nova-metadata-0\" (UID: \"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34\") " pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.861759 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.967616 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.967618 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e3746ab9-9512-4d76-94fe-312dd6679c25","Type":"ContainerDied","Data":"14ae5ebf35aa33330d54b9bf5c871dbd6dd56749dbf749e424f7b647d3c0282f"} Dec 05 09:05:18 crc kubenswrapper[4863]: I1205 09:05:18.968027 4863 scope.go:117] "RemoveContainer" containerID="3d44ea4e88e4a9b5db45bd58e8d560f464e458201ecadf24066bfd8cf4343a55" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.004323 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.020485 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.035882 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.037137 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.041387 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.053030 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.063969 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d"] Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.138155 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.138340 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.138402 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz574\" (UniqueName: \"kubernetes.io/projected/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-kube-api-access-pz574\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.215026 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.242943 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.243436 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.243672 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz574\" (UniqueName: \"kubernetes.io/projected/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-kube-api-access-pz574\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.252504 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.252814 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.264134 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz574\" (UniqueName: \"kubernetes.io/projected/eb6088a8-bcfe-47a9-9808-6bfaf328e4fa-kube-api-access-pz574\") pod \"nova-cell0-conductor-0\" (UID: \"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa\") " pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.371027 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.383599 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 09:05:19 crc kubenswrapper[4863]: W1205 09:05:19.403848 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc20b40d0_0d53_4f0b_8ee6_ba7ed886ac34.slice/crio-c7e957e92f53d254c1725f1661b61e18148e5a2a68f541a635edc2b4470cc8e6 WatchSource:0}: Error finding container c7e957e92f53d254c1725f1661b61e18148e5a2a68f541a635edc2b4470cc8e6: Status 404 returned error can't find the container with id c7e957e92f53d254c1725f1661b61e18148e5a2a68f541a635edc2b4470cc8e6 Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.689390 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.863592 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-combined-ca-bundle\") pod \"5d153922-7ce3-4480-82d8-c02ae2163538\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.863683 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-config-data\") pod \"5d153922-7ce3-4480-82d8-c02ae2163538\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.863873 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pcrj\" (UniqueName: \"kubernetes.io/projected/5d153922-7ce3-4480-82d8-c02ae2163538-kube-api-access-6pcrj\") pod \"5d153922-7ce3-4480-82d8-c02ae2163538\" (UID: \"5d153922-7ce3-4480-82d8-c02ae2163538\") " Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.874446 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d153922-7ce3-4480-82d8-c02ae2163538-kube-api-access-6pcrj" (OuterVolumeSpecName: "kube-api-access-6pcrj") pod "5d153922-7ce3-4480-82d8-c02ae2163538" (UID: "5d153922-7ce3-4480-82d8-c02ae2163538"). InnerVolumeSpecName "kube-api-access-6pcrj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.956055 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d153922-7ce3-4480-82d8-c02ae2163538" (UID: "5d153922-7ce3-4480-82d8-c02ae2163538"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.959953 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-config-data" (OuterVolumeSpecName: "config-data") pod "5d153922-7ce3-4480-82d8-c02ae2163538" (UID: "5d153922-7ce3-4480-82d8-c02ae2163538"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.966389 4863 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.966433 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d153922-7ce3-4480-82d8-c02ae2163538-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.966441 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pcrj\" (UniqueName: \"kubernetes.io/projected/5d153922-7ce3-4480-82d8-c02ae2163538-kube-api-access-6pcrj\") on node \"crc\" DevicePath \"\"" Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.995597 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"68c101ff-556e-4b29-8b6c-1d38a8b51afe","Type":"ContainerStarted","Data":"1e82c35f04f886aa73f362745dba44d52174d296e923e301c5474a2bc9926df4"} Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.995650 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"68c101ff-556e-4b29-8b6c-1d38a8b51afe","Type":"ContainerStarted","Data":"ca4040c05aa54131df02d962b783089273d25c7b9c86939c20447e2da2be9913"} Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.995664 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"68c101ff-556e-4b29-8b6c-1d38a8b51afe","Type":"ContainerStarted","Data":"4d220073177f3149ed71adbff0e54836ae87b3184df483852300e924f4e1508b"} Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.999446 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34","Type":"ContainerStarted","Data":"3d885c897de1d9c50c6c13112e7c533d2f5e0be234c5f5cb22df8468984b29a1"} Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.999507 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34","Type":"ContainerStarted","Data":"b55c9e70d319c1a0e1350ff93c1d2426048b82d3212995ac1aa62fac48cdcd25"} Dec 05 09:05:19 crc kubenswrapper[4863]: I1205 09:05:19.999523 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34","Type":"ContainerStarted","Data":"c7e957e92f53d254c1725f1661b61e18148e5a2a68f541a635edc2b4470cc8e6"} Dec 05 
09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.001930 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" event={"ID":"c0e462cb-31a4-47c8-8d0a-1bd2044719b0","Type":"ContainerStarted","Data":"091c7392636edc4657e8a44a62152ff082b6f9bcf5663df693e4558f60bd2aa8"} Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.001998 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" event={"ID":"c0e462cb-31a4-47c8-8d0a-1bd2044719b0","Type":"ContainerStarted","Data":"5d59055dd42a39409c8aab4ce161b4eb8d79eb5d7beb3a102a478312d85c2c5c"} Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.003826 4863 generic.go:334] "Generic (PLEG): container finished" podID="5d153922-7ce3-4480-82d8-c02ae2163538" containerID="5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" exitCode=0 Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.003871 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.003926 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5d153922-7ce3-4480-82d8-c02ae2163538","Type":"ContainerDied","Data":"5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c"} Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.003953 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5d153922-7ce3-4480-82d8-c02ae2163538","Type":"ContainerDied","Data":"7f4650d8f5aec9760acbb0718197d5430b80fed6c154d94c20061de57ec3fc64"} Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.003998 4863 scope.go:117] "RemoveContainer" containerID="5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" Dec 05 09:05:20 crc kubenswrapper[4863]: W1205 09:05:20.005003 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb6088a8_bcfe_47a9_9808_6bfaf328e4fa.slice/crio-d3ad222d5069f58d671aeb733de976288e1407d90c41566923dd9a51dd66f6fc WatchSource:0}: Error finding container d3ad222d5069f58d671aeb733de976288e1407d90c41566923dd9a51dd66f6fc: Status 404 returned error can't find the container with id d3ad222d5069f58d671aeb733de976288e1407d90c41566923dd9a51dd66f6fc Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.005482 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.020981 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.020960339 podStartE2EDuration="2.020960339s" podCreationTimestamp="2025-12-05 09:05:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:05:20.01405086 +0000 UTC m=+8347.740047900" watchObservedRunningTime="2025-12-05 09:05:20.020960339 +0000 UTC m=+8347.746957379" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.029791 4863 scope.go:117] "RemoveContainer" containerID="5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" Dec 05 09:05:20 crc kubenswrapper[4863]: E1205 09:05:20.031082 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c\": container with ID starting with 5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c not found: ID does not exist" containerID="5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.031126 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c"} err="failed to get container status \"5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c\": rpc error: code = NotFound desc = could not find container \"5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c\": container with ID starting with 5b002d300c60c292c3e6c6202a5254ab2182cebbd2ef602a959ecf12c91d788c not found: ID does not exist" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.069407 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" podStartSLOduration=1.640080517 podStartE2EDuration="2.069384743s" podCreationTimestamp="2025-12-05 09:05:18 +0000 UTC" firstStartedPulling="2025-12-05 09:05:19.062781681 +0000 UTC m=+8346.788778721" lastFinishedPulling="2025-12-05 09:05:19.492085907 +0000 UTC m=+8347.218082947" observedRunningTime="2025-12-05 09:05:20.065875577 +0000 UTC m=+8347.791872627" watchObservedRunningTime="2025-12-05 09:05:20.069384743 +0000 UTC m=+8347.795381783" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.072823 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.072803657 podStartE2EDuration="2.072803657s" podCreationTimestamp="2025-12-05 09:05:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:05:20.043183453 +0000 UTC m=+8347.769180503" watchObservedRunningTime="2025-12-05 09:05:20.072803657 +0000 UTC m=+8347.798800717" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.109026 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.127798 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.142595 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 09:05:20 crc kubenswrapper[4863]: E1205 09:05:20.143085 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d153922-7ce3-4480-82d8-c02ae2163538" containerName="nova-scheduler-scheduler" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.143105 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d153922-7ce3-4480-82d8-c02ae2163538" containerName="nova-scheduler-scheduler" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.143320 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d153922-7ce3-4480-82d8-c02ae2163538" containerName="nova-scheduler-scheduler" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.144122 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.147759 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.166689 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.246852 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.280305 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pswr\" (UniqueName: \"kubernetes.io/projected/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-kube-api-access-4pswr\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.280628 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.280918 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-config-data\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.383583 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-config-data\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.383807 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pswr\" (UniqueName: \"kubernetes.io/projected/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-kube-api-access-4pswr\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.383968 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.392175 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.401819 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-config-data\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc 
kubenswrapper[4863]: I1205 09:05:20.405173 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pswr\" (UniqueName: \"kubernetes.io/projected/cbb3dd65-ec81-45b1-9bc0-83466df03e4c-kube-api-access-4pswr\") pod \"nova-scheduler-0\" (UID: \"cbb3dd65-ec81-45b1-9bc0-83466df03e4c\") " pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.469670 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.626077 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d153922-7ce3-4480-82d8-c02ae2163538" path="/var/lib/kubelet/pods/5d153922-7ce3-4480-82d8-c02ae2163538/volumes" Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.627336 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3746ab9-9512-4d76-94fe-312dd6679c25" path="/var/lib/kubelet/pods/e3746ab9-9512-4d76-94fe-312dd6679c25/volumes" Dec 05 09:05:20 crc kubenswrapper[4863]: W1205 09:05:20.965626 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbb3dd65_ec81_45b1_9bc0_83466df03e4c.slice/crio-cba720f353f4d8b91b1e8edb40a15123cb5298bd444a490403c581f43f6dd632 WatchSource:0}: Error finding container cba720f353f4d8b91b1e8edb40a15123cb5298bd444a490403c581f43f6dd632: Status 404 returned error can't find the container with id cba720f353f4d8b91b1e8edb40a15123cb5298bd444a490403c581f43f6dd632 Dec 05 09:05:20 crc kubenswrapper[4863]: I1205 09:05:20.973049 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 09:05:21 crc kubenswrapper[4863]: I1205 09:05:21.015595 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cbb3dd65-ec81-45b1-9bc0-83466df03e4c","Type":"ContainerStarted","Data":"cba720f353f4d8b91b1e8edb40a15123cb5298bd444a490403c581f43f6dd632"} Dec 05 09:05:21 crc kubenswrapper[4863]: I1205 09:05:21.017905 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa","Type":"ContainerStarted","Data":"52db05c90d830824b9d5f6b81b1ff5c0c0d96a791280aba08189b49a040477e7"} Dec 05 09:05:21 crc kubenswrapper[4863]: I1205 09:05:21.017940 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"eb6088a8-bcfe-47a9-9808-6bfaf328e4fa","Type":"ContainerStarted","Data":"d3ad222d5069f58d671aeb733de976288e1407d90c41566923dd9a51dd66f6fc"} Dec 05 09:05:21 crc kubenswrapper[4863]: I1205 09:05:21.018424 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:21 crc kubenswrapper[4863]: I1205 09:05:21.042458 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=3.042438214 podStartE2EDuration="3.042438214s" podCreationTimestamp="2025-12-05 09:05:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:05:21.03650912 +0000 UTC m=+8348.762506160" watchObservedRunningTime="2025-12-05 09:05:21.042438214 +0000 UTC m=+8348.768435254" Dec 05 09:05:22 crc kubenswrapper[4863]: I1205 09:05:22.032925 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"cbb3dd65-ec81-45b1-9bc0-83466df03e4c","Type":"ContainerStarted","Data":"1b0433e5d529e48d1d14111a41bd899c2ad04eaafc4a0fc2b7f1bac002a90ca3"} Dec 05 09:05:22 crc kubenswrapper[4863]: I1205 09:05:22.051248 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.051228471 podStartE2EDuration="2.051228471s" podCreationTimestamp="2025-12-05 09:05:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:05:22.050274397 +0000 UTC m=+8349.776271437" watchObservedRunningTime="2025-12-05 09:05:22.051228471 +0000 UTC m=+8349.777225511" Dec 05 09:05:23 crc kubenswrapper[4863]: I1205 09:05:23.862744 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 09:05:23 crc kubenswrapper[4863]: I1205 09:05:23.863081 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 09:05:25 crc kubenswrapper[4863]: I1205 09:05:25.469906 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 09:05:28 crc kubenswrapper[4863]: I1205 09:05:28.724915 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 09:05:28 crc kubenswrapper[4863]: I1205 09:05:28.725527 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 09:05:28 crc kubenswrapper[4863]: I1205 09:05:28.862255 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 09:05:28 crc kubenswrapper[4863]: I1205 09:05:28.862314 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 09:05:29 crc kubenswrapper[4863]: I1205 09:05:29.405012 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 09:05:29 crc kubenswrapper[4863]: I1205 09:05:29.807712 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="68c101ff-556e-4b29-8b6c-1d38a8b51afe" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.167:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 09:05:29 crc kubenswrapper[4863]: I1205 09:05:29.807910 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="68c101ff-556e-4b29-8b6c-1d38a8b51afe" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.167:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 09:05:29 crc kubenswrapper[4863]: I1205 09:05:29.945734 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.168:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 09:05:29 crc kubenswrapper[4863]: I1205 09:05:29.945817 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.168:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 09:05:30 crc 
kubenswrapper[4863]: I1205 09:05:30.470245 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 09:05:30 crc kubenswrapper[4863]: I1205 09:05:30.504256 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 09:05:31 crc kubenswrapper[4863]: I1205 09:05:31.166417 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.728698 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.729218 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.729464 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.729512 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.731817 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.731978 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.864587 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.865126 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 09:05:38 crc kubenswrapper[4863]: I1205 09:05:38.866190 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 09:05:39 crc kubenswrapper[4863]: I1205 09:05:39.191086 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.091049 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z87r7"] Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.095510 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.140298 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z87r7"] Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.225738 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-utilities\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.225851 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5cj7\" (UniqueName: \"kubernetes.io/projected/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-kube-api-access-r5cj7\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.225930 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-catalog-content\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.327861 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-utilities\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.327980 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5cj7\" (UniqueName: \"kubernetes.io/projected/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-kube-api-access-r5cj7\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.328053 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-catalog-content\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.328499 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-utilities\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.328636 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-catalog-content\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.353329 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r5cj7\" (UniqueName: \"kubernetes.io/projected/35dcc30f-e85d-4a14-bbe5-67efc3530c0f-kube-api-access-r5cj7\") pod \"community-operators-z87r7\" (UID: \"35dcc30f-e85d-4a14-bbe5-67efc3530c0f\") " pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:55 crc kubenswrapper[4863]: I1205 09:05:55.433983 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:05:56 crc kubenswrapper[4863]: I1205 09:05:56.018124 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z87r7"] Dec 05 09:05:56 crc kubenswrapper[4863]: I1205 09:05:56.338287 4863 generic.go:334] "Generic (PLEG): container finished" podID="35dcc30f-e85d-4a14-bbe5-67efc3530c0f" containerID="f8bcbed21a0f769ae85c489827713c85a4d0909c9ebe71061526b3aa14e2615c" exitCode=0 Dec 05 09:05:56 crc kubenswrapper[4863]: I1205 09:05:56.338363 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z87r7" event={"ID":"35dcc30f-e85d-4a14-bbe5-67efc3530c0f","Type":"ContainerDied","Data":"f8bcbed21a0f769ae85c489827713c85a4d0909c9ebe71061526b3aa14e2615c"} Dec 05 09:05:56 crc kubenswrapper[4863]: I1205 09:05:56.338675 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z87r7" event={"ID":"35dcc30f-e85d-4a14-bbe5-67efc3530c0f","Type":"ContainerStarted","Data":"d28f5daa49da5809aaaa87c93eee7e5e0d63ec5a45275261a61876960616403e"} Dec 05 09:05:56 crc kubenswrapper[4863]: I1205 09:05:56.340230 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:06:00 crc kubenswrapper[4863]: I1205 09:06:00.382454 4863 generic.go:334] "Generic (PLEG): container finished" podID="35dcc30f-e85d-4a14-bbe5-67efc3530c0f" containerID="ecaef26c07e22a48be9d88f04e438c90defde1083c9ece2698bd6c64db42ec08" exitCode=0 Dec 05 09:06:00 crc kubenswrapper[4863]: I1205 09:06:00.382530 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z87r7" event={"ID":"35dcc30f-e85d-4a14-bbe5-67efc3530c0f","Type":"ContainerDied","Data":"ecaef26c07e22a48be9d88f04e438c90defde1083c9ece2698bd6c64db42ec08"} Dec 05 09:06:01 crc kubenswrapper[4863]: I1205 09:06:01.395833 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z87r7" event={"ID":"35dcc30f-e85d-4a14-bbe5-67efc3530c0f","Type":"ContainerStarted","Data":"35bd8ab69a97e928410aae009c4be9ab8f3ae5c748834be838f82f0938570eb2"} Dec 05 09:06:01 crc kubenswrapper[4863]: I1205 09:06:01.415217 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z87r7" podStartSLOduration=1.908396589 podStartE2EDuration="6.415184834s" podCreationTimestamp="2025-12-05 09:05:55 +0000 UTC" firstStartedPulling="2025-12-05 09:05:56.33998016 +0000 UTC m=+8384.065977200" lastFinishedPulling="2025-12-05 09:06:00.846768405 +0000 UTC m=+8388.572765445" observedRunningTime="2025-12-05 09:06:01.411298798 +0000 UTC m=+8389.137295858" watchObservedRunningTime="2025-12-05 09:06:01.415184834 +0000 UTC m=+8389.141181894" Dec 05 09:06:05 crc kubenswrapper[4863]: I1205 09:06:05.435028 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:06:05 crc kubenswrapper[4863]: I1205 09:06:05.435623 4863 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:06:05 crc kubenswrapper[4863]: I1205 09:06:05.478042 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:06:06 crc kubenswrapper[4863]: I1205 09:06:06.488561 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z87r7" Dec 05 09:06:06 crc kubenswrapper[4863]: I1205 09:06:06.548107 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z87r7"] Dec 05 09:06:06 crc kubenswrapper[4863]: I1205 09:06:06.593015 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4b4mm"] Dec 05 09:06:06 crc kubenswrapper[4863]: I1205 09:06:06.593286 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4b4mm" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="registry-server" containerID="cri-o://b99887a54c58e691df1f103769e7e0dc4e4ff3a7d6ef383b1b39afa71ca651fd" gracePeriod=2 Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.455289 4863 generic.go:334] "Generic (PLEG): container finished" podID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerID="b99887a54c58e691df1f103769e7e0dc4e4ff3a7d6ef383b1b39afa71ca651fd" exitCode=0 Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.455439 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4mm" event={"ID":"60606549-d6e2-4fcd-98be-cb5ff7760f12","Type":"ContainerDied","Data":"b99887a54c58e691df1f103769e7e0dc4e4ff3a7d6ef383b1b39afa71ca651fd"} Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.604790 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.716882 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8cbm\" (UniqueName: \"kubernetes.io/projected/60606549-d6e2-4fcd-98be-cb5ff7760f12-kube-api-access-f8cbm\") pod \"60606549-d6e2-4fcd-98be-cb5ff7760f12\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.717005 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-utilities\") pod \"60606549-d6e2-4fcd-98be-cb5ff7760f12\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.717201 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-catalog-content\") pod \"60606549-d6e2-4fcd-98be-cb5ff7760f12\" (UID: \"60606549-d6e2-4fcd-98be-cb5ff7760f12\") " Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.719028 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-utilities" (OuterVolumeSpecName: "utilities") pod "60606549-d6e2-4fcd-98be-cb5ff7760f12" (UID: "60606549-d6e2-4fcd-98be-cb5ff7760f12"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.728038 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60606549-d6e2-4fcd-98be-cb5ff7760f12-kube-api-access-f8cbm" (OuterVolumeSpecName: "kube-api-access-f8cbm") pod "60606549-d6e2-4fcd-98be-cb5ff7760f12" (UID: "60606549-d6e2-4fcd-98be-cb5ff7760f12"). InnerVolumeSpecName "kube-api-access-f8cbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.803103 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60606549-d6e2-4fcd-98be-cb5ff7760f12" (UID: "60606549-d6e2-4fcd-98be-cb5ff7760f12"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.819274 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.819313 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8cbm\" (UniqueName: \"kubernetes.io/projected/60606549-d6e2-4fcd-98be-cb5ff7760f12-kube-api-access-f8cbm\") on node \"crc\" DevicePath \"\"" Dec 05 09:06:07 crc kubenswrapper[4863]: I1205 09:06:07.819324 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60606549-d6e2-4fcd-98be-cb5ff7760f12-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.463607 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.463669 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.468413 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4mm" event={"ID":"60606549-d6e2-4fcd-98be-cb5ff7760f12","Type":"ContainerDied","Data":"4ec4454cc983a37406a183fa76e9416374e01b2cfee1383547155ad4a5efd2e4"} Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.468503 4863 scope.go:117] "RemoveContainer" containerID="b99887a54c58e691df1f103769e7e0dc4e4ff3a7d6ef383b1b39afa71ca651fd" Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.468530 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4b4mm" Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.510559 4863 scope.go:117] "RemoveContainer" containerID="c39623f5979967ad92ef826c27d7d12712f02456319390d0dd94cbbadcfa2562" Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.513720 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4b4mm"] Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.526626 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4b4mm"] Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.538201 4863 scope.go:117] "RemoveContainer" containerID="f505a82e94b25fcbfec8a1a093517ef50c86ae93e8a672da550521d00b459974" Dec 05 09:06:08 crc kubenswrapper[4863]: I1205 09:06:08.625702 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" path="/var/lib/kubelet/pods/60606549-d6e2-4fcd-98be-cb5ff7760f12/volumes" Dec 05 09:06:08 crc kubenswrapper[4863]: E1205 09:06:08.666367 4863 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60606549_d6e2_4fcd_98be_cb5ff7760f12.slice/crio-4ec4454cc983a37406a183fa76e9416374e01b2cfee1383547155ad4a5efd2e4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60606549_d6e2_4fcd_98be_cb5ff7760f12.slice\": RecentStats: unable to find data in memory cache]" Dec 05 09:06:38 crc kubenswrapper[4863]: I1205 09:06:38.464161 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:06:38 crc kubenswrapper[4863]: I1205 09:06:38.464758 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:07:08 crc kubenswrapper[4863]: I1205 09:07:08.463873 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:07:08 crc kubenswrapper[4863]: I1205 09:07:08.464521 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:07:08 crc kubenswrapper[4863]: I1205 09:07:08.464571 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 09:07:08 crc kubenswrapper[4863]: I1205 09:07:08.465440 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:07:08 crc kubenswrapper[4863]: I1205 09:07:08.465519 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" gracePeriod=600 Dec 05 09:07:08 crc kubenswrapper[4863]: E1205 09:07:08.589300 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:07:09 crc kubenswrapper[4863]: I1205 09:07:09.178256 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" exitCode=0 Dec 05 09:07:09 crc kubenswrapper[4863]: I1205 09:07:09.178715 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6"} Dec 05 09:07:09 crc kubenswrapper[4863]: I1205 09:07:09.178847 4863 scope.go:117] "RemoveContainer" containerID="982c42481ffddae0a7f94c887784f3c9bac1911bad01393af21e1d462a575bf1" Dec 05 09:07:09 crc kubenswrapper[4863]: I1205 09:07:09.180839 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:07:09 crc kubenswrapper[4863]: E1205 09:07:09.181564 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:07:22 crc kubenswrapper[4863]: I1205 09:07:22.609291 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:07:22 crc kubenswrapper[4863]: E1205 09:07:22.610350 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:07:35 crc kubenswrapper[4863]: I1205 09:07:35.602035 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:07:35 crc kubenswrapper[4863]: E1205 09:07:35.602934 4863 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:07:50 crc kubenswrapper[4863]: I1205 09:07:50.602918 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:07:50 crc kubenswrapper[4863]: E1205 09:07:50.603662 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:08:03 crc kubenswrapper[4863]: I1205 09:08:03.602411 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:08:03 crc kubenswrapper[4863]: E1205 09:08:03.603114 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:08:14 crc kubenswrapper[4863]: I1205 09:08:14.605938 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:08:14 crc kubenswrapper[4863]: E1205 09:08:14.606527 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:08:26 crc kubenswrapper[4863]: I1205 09:08:26.602549 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:08:26 crc kubenswrapper[4863]: E1205 09:08:26.603991 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:08:40 crc kubenswrapper[4863]: I1205 09:08:40.602678 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:08:40 crc kubenswrapper[4863]: E1205 09:08:40.605333 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:08:52 crc kubenswrapper[4863]: I1205 09:08:52.609457 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:08:52 crc kubenswrapper[4863]: E1205 09:08:52.610992 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:09:06 crc kubenswrapper[4863]: I1205 09:09:06.602543 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:09:06 crc kubenswrapper[4863]: E1205 09:09:06.603789 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:09:19 crc kubenswrapper[4863]: I1205 09:09:19.604662 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:09:19 crc kubenswrapper[4863]: E1205 09:09:19.605913 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:09:32 crc kubenswrapper[4863]: I1205 09:09:32.615892 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:09:32 crc kubenswrapper[4863]: E1205 09:09:32.617180 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:09:45 crc kubenswrapper[4863]: I1205 09:09:45.601921 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:09:45 crc kubenswrapper[4863]: E1205 09:09:45.602739 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:09:57 crc kubenswrapper[4863]: I1205 09:09:57.603528 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:09:57 crc kubenswrapper[4863]: E1205 09:09:57.604231 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:10:10 crc kubenswrapper[4863]: I1205 09:10:10.601761 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:10:10 crc kubenswrapper[4863]: E1205 09:10:10.602502 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.549365 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-stzll"] Dec 05 09:10:19 crc kubenswrapper[4863]: E1205 09:10:19.550633 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="registry-server" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.550653 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="registry-server" Dec 05 09:10:19 crc kubenswrapper[4863]: E1205 09:10:19.550676 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="extract-utilities" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.550686 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="extract-utilities" Dec 05 09:10:19 crc kubenswrapper[4863]: E1205 09:10:19.550736 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="extract-content" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.550744 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="extract-content" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.551008 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="60606549-d6e2-4fcd-98be-cb5ff7760f12" containerName="registry-server" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.552891 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.570349 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-stzll"] Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.661515 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-catalog-content\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.661765 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dcxf\" (UniqueName: \"kubernetes.io/projected/8c4ad363-5123-4df9-bd83-427519403dd6-kube-api-access-8dcxf\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.661920 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-utilities\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.770348 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-catalog-content\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.771174 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dcxf\" (UniqueName: \"kubernetes.io/projected/8c4ad363-5123-4df9-bd83-427519403dd6-kube-api-access-8dcxf\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.771850 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-utilities\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.772233 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-catalog-content\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.773987 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-utilities\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.797879 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8dcxf\" (UniqueName: \"kubernetes.io/projected/8c4ad363-5123-4df9-bd83-427519403dd6-kube-api-access-8dcxf\") pod \"redhat-marketplace-stzll\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:19 crc kubenswrapper[4863]: I1205 09:10:19.885593 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:20 crc kubenswrapper[4863]: I1205 09:10:20.459018 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-stzll"] Dec 05 09:10:21 crc kubenswrapper[4863]: I1205 09:10:21.089171 4863 generic.go:334] "Generic (PLEG): container finished" podID="8c4ad363-5123-4df9-bd83-427519403dd6" containerID="e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10" exitCode=0 Dec 05 09:10:21 crc kubenswrapper[4863]: I1205 09:10:21.089236 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-stzll" event={"ID":"8c4ad363-5123-4df9-bd83-427519403dd6","Type":"ContainerDied","Data":"e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10"} Dec 05 09:10:21 crc kubenswrapper[4863]: I1205 09:10:21.089534 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-stzll" event={"ID":"8c4ad363-5123-4df9-bd83-427519403dd6","Type":"ContainerStarted","Data":"2a644f9ed3e7a4eb030217249a217c79edfdcae07ca154188ded7300b57b0a08"} Dec 05 09:10:22 crc kubenswrapper[4863]: I1205 09:10:22.103059 4863 generic.go:334] "Generic (PLEG): container finished" podID="8c4ad363-5123-4df9-bd83-427519403dd6" containerID="d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a" exitCode=0 Dec 05 09:10:22 crc kubenswrapper[4863]: I1205 09:10:22.103179 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-stzll" event={"ID":"8c4ad363-5123-4df9-bd83-427519403dd6","Type":"ContainerDied","Data":"d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a"} Dec 05 09:10:23 crc kubenswrapper[4863]: I1205 09:10:23.117565 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-stzll" event={"ID":"8c4ad363-5123-4df9-bd83-427519403dd6","Type":"ContainerStarted","Data":"4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84"} Dec 05 09:10:23 crc kubenswrapper[4863]: I1205 09:10:23.144304 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-stzll" podStartSLOduration=2.715184395 podStartE2EDuration="4.144281658s" podCreationTimestamp="2025-12-05 09:10:19 +0000 UTC" firstStartedPulling="2025-12-05 09:10:21.091292629 +0000 UTC m=+8648.817289679" lastFinishedPulling="2025-12-05 09:10:22.520389902 +0000 UTC m=+8650.246386942" observedRunningTime="2025-12-05 09:10:23.135550734 +0000 UTC m=+8650.861547784" watchObservedRunningTime="2025-12-05 09:10:23.144281658 +0000 UTC m=+8650.870278708" Dec 05 09:10:24 crc kubenswrapper[4863]: I1205 09:10:24.602299 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:10:24 crc kubenswrapper[4863]: E1205 09:10:24.602872 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:10:29 crc kubenswrapper[4863]: I1205 09:10:29.886181 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:29 crc kubenswrapper[4863]: I1205 09:10:29.886534 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:29 crc kubenswrapper[4863]: I1205 09:10:29.934392 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:30 crc kubenswrapper[4863]: I1205 09:10:30.224419 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:30 crc kubenswrapper[4863]: I1205 09:10:30.279772 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-stzll"] Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.197821 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-stzll" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="registry-server" containerID="cri-o://4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84" gracePeriod=2 Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.696774 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.834873 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-catalog-content\") pod \"8c4ad363-5123-4df9-bd83-427519403dd6\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.834936 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-utilities\") pod \"8c4ad363-5123-4df9-bd83-427519403dd6\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.835109 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dcxf\" (UniqueName: \"kubernetes.io/projected/8c4ad363-5123-4df9-bd83-427519403dd6-kube-api-access-8dcxf\") pod \"8c4ad363-5123-4df9-bd83-427519403dd6\" (UID: \"8c4ad363-5123-4df9-bd83-427519403dd6\") " Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.835898 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-utilities" (OuterVolumeSpecName: "utilities") pod "8c4ad363-5123-4df9-bd83-427519403dd6" (UID: "8c4ad363-5123-4df9-bd83-427519403dd6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.840936 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c4ad363-5123-4df9-bd83-427519403dd6-kube-api-access-8dcxf" (OuterVolumeSpecName: "kube-api-access-8dcxf") pod "8c4ad363-5123-4df9-bd83-427519403dd6" (UID: "8c4ad363-5123-4df9-bd83-427519403dd6"). InnerVolumeSpecName "kube-api-access-8dcxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.852772 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c4ad363-5123-4df9-bd83-427519403dd6" (UID: "8c4ad363-5123-4df9-bd83-427519403dd6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.938067 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dcxf\" (UniqueName: \"kubernetes.io/projected/8c4ad363-5123-4df9-bd83-427519403dd6-kube-api-access-8dcxf\") on node \"crc\" DevicePath \"\"" Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.938113 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:10:32 crc kubenswrapper[4863]: I1205 09:10:32.938126 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c4ad363-5123-4df9-bd83-427519403dd6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.210965 4863 generic.go:334] "Generic (PLEG): container finished" podID="8c4ad363-5123-4df9-bd83-427519403dd6" containerID="4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84" exitCode=0 Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.211009 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-stzll" event={"ID":"8c4ad363-5123-4df9-bd83-427519403dd6","Type":"ContainerDied","Data":"4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84"} Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.211037 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-stzll" event={"ID":"8c4ad363-5123-4df9-bd83-427519403dd6","Type":"ContainerDied","Data":"2a644f9ed3e7a4eb030217249a217c79edfdcae07ca154188ded7300b57b0a08"} Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.211053 4863 scope.go:117] "RemoveContainer" containerID="4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.211205 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-stzll" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.234196 4863 scope.go:117] "RemoveContainer" containerID="d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.261904 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-stzll"] Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.270847 4863 scope.go:117] "RemoveContainer" containerID="e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.272317 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-stzll"] Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.307905 4863 scope.go:117] "RemoveContainer" containerID="4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84" Dec 05 09:10:33 crc kubenswrapper[4863]: E1205 09:10:33.308545 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84\": container with ID starting with 4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84 not found: ID does not exist" containerID="4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.308587 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84"} err="failed to get container status \"4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84\": rpc error: code = NotFound desc = could not find container \"4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84\": container with ID starting with 4f9e413fb34111996fe1f07a0a703cc8300a46661c603e4805b09cde89496f84 not found: ID does not exist" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.308614 4863 scope.go:117] "RemoveContainer" containerID="d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a" Dec 05 09:10:33 crc kubenswrapper[4863]: E1205 09:10:33.308976 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a\": container with ID starting with d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a not found: ID does not exist" containerID="d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.309018 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a"} err="failed to get container status \"d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a\": rpc error: code = NotFound desc = could not find container \"d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a\": container with ID starting with d6ff2477e9b7f1ad412a527103dd648c78b3659b5f908ae963692b0274f1b89a not found: ID does not exist" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.309048 4863 scope.go:117] "RemoveContainer" containerID="e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10" Dec 05 09:10:33 crc kubenswrapper[4863]: E1205 09:10:33.309301 4863 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10\": container with ID starting with e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10 not found: ID does not exist" containerID="e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10" Dec 05 09:10:33 crc kubenswrapper[4863]: I1205 09:10:33.309320 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10"} err="failed to get container status \"e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10\": rpc error: code = NotFound desc = could not find container \"e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10\": container with ID starting with e30edde239d804235f92ace4e76598aaedcab57724fc517e75f0a9e0336d7b10 not found: ID does not exist" Dec 05 09:10:34 crc kubenswrapper[4863]: I1205 09:10:34.615444 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" path="/var/lib/kubelet/pods/8c4ad363-5123-4df9-bd83-427519403dd6/volumes" Dec 05 09:10:35 crc kubenswrapper[4863]: I1205 09:10:35.601575 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:10:35 crc kubenswrapper[4863]: E1205 09:10:35.602192 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:10:50 crc kubenswrapper[4863]: I1205 09:10:50.601822 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:10:50 crc kubenswrapper[4863]: E1205 09:10:50.602598 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:11:05 crc kubenswrapper[4863]: I1205 09:11:05.602782 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:11:05 crc kubenswrapper[4863]: E1205 09:11:05.603593 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:11:16 crc kubenswrapper[4863]: I1205 09:11:16.602784 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:11:16 crc kubenswrapper[4863]: E1205 09:11:16.604165 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:11:27 crc kubenswrapper[4863]: I1205 09:11:27.602409 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:11:27 crc kubenswrapper[4863]: E1205 09:11:27.603243 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:11:41 crc kubenswrapper[4863]: I1205 09:11:41.601746 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:11:41 crc kubenswrapper[4863]: E1205 09:11:41.602657 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:11:53 crc kubenswrapper[4863]: I1205 09:11:53.601918 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:11:53 crc kubenswrapper[4863]: E1205 09:11:53.602650 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:12:07 crc kubenswrapper[4863]: I1205 09:12:07.602628 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:12:07 crc kubenswrapper[4863]: E1205 09:12:07.603496 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:12:20 crc kubenswrapper[4863]: I1205 09:12:20.602720 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:12:22 crc kubenswrapper[4863]: I1205 09:12:22.245023 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"0813f46189acc4298f5e7eb3be8b9516f9a288f1fb3d5d36d56ef93c6916022a"} Dec 05 
09:13:14 crc kubenswrapper[4863]: I1205 09:13:14.722731 4863 generic.go:334] "Generic (PLEG): container finished" podID="c0e462cb-31a4-47c8-8d0a-1bd2044719b0" containerID="091c7392636edc4657e8a44a62152ff082b6f9bcf5663df693e4558f60bd2aa8" exitCode=0 Dec 05 09:13:14 crc kubenswrapper[4863]: I1205 09:13:14.722837 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" event={"ID":"c0e462cb-31a4-47c8-8d0a-1bd2044719b0","Type":"ContainerDied","Data":"091c7392636edc4657e8a44a62152ff082b6f9bcf5663df693e4558f60bd2aa8"} Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.371948 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433540 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-inventory\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433587 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-1\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433717 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l92z9\" (UniqueName: \"kubernetes.io/projected/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-kube-api-access-l92z9\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433735 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-0\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433785 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-1\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433811 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-0\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433837 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-combined-ca-bundle\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433872 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-1\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433890 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ssh-key\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.433949 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-0\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.434009 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ceph\") pod \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\" (UID: \"c0e462cb-31a4-47c8-8d0a-1bd2044719b0\") " Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.440446 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ceph" (OuterVolumeSpecName: "ceph") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.439954 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.460834 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-kube-api-access-l92z9" (OuterVolumeSpecName: "kube-api-access-l92z9") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "kube-api-access-l92z9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.467869 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.475819 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.476226 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-inventory" (OuterVolumeSpecName: "inventory") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.482695 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.484070 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.490677 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.499567 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.500683 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "c0e462cb-31a4-47c8-8d0a-1bd2044719b0" (UID: "c0e462cb-31a4-47c8-8d0a-1bd2044719b0"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535783 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535856 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l92z9\" (UniqueName: \"kubernetes.io/projected/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-kube-api-access-l92z9\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535866 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535875 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535884 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535893 4863 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535902 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535910 4863 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535918 4863 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535926 4863 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.535934 4863 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c0e462cb-31a4-47c8-8d0a-1bd2044719b0-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.760763 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" event={"ID":"c0e462cb-31a4-47c8-8d0a-1bd2044719b0","Type":"ContainerDied","Data":"5d59055dd42a39409c8aab4ce161b4eb8d79eb5d7beb3a102a478312d85c2c5c"} Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.760809 4863 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d59055dd42a39409c8aab4ce161b4eb8d79eb5d7beb3a102a478312d85c2c5c" Dec 05 09:13:16 crc kubenswrapper[4863]: I1205 09:13:16.760810 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.486219 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h7zmq"] Dec 05 09:13:39 crc kubenswrapper[4863]: E1205 09:13:39.490949 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="registry-server" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.490972 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="registry-server" Dec 05 09:13:39 crc kubenswrapper[4863]: E1205 09:13:39.490988 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="extract-utilities" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.490997 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="extract-utilities" Dec 05 09:13:39 crc kubenswrapper[4863]: E1205 09:13:39.491020 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0e462cb-31a4-47c8-8d0a-1bd2044719b0" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.491030 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0e462cb-31a4-47c8-8d0a-1bd2044719b0" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Dec 05 09:13:39 crc kubenswrapper[4863]: E1205 09:13:39.491063 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="extract-content" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.491072 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="extract-content" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.491301 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0e462cb-31a4-47c8-8d0a-1bd2044719b0" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.491331 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4ad363-5123-4df9-bd83-427519403dd6" containerName="registry-server" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.493309 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.499462 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h7zmq"] Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.647430 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-catalog-content\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.647853 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-utilities\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.648194 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwnp2\" (UniqueName: \"kubernetes.io/projected/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-kube-api-access-pwnp2\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.750111 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-utilities\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.750316 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwnp2\" (UniqueName: \"kubernetes.io/projected/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-kube-api-access-pwnp2\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.750391 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-catalog-content\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.750600 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-utilities\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.751032 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-catalog-content\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.771242 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pwnp2\" (UniqueName: \"kubernetes.io/projected/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-kube-api-access-pwnp2\") pod \"redhat-operators-h7zmq\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:39 crc kubenswrapper[4863]: I1205 09:13:39.821460 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:40 crc kubenswrapper[4863]: I1205 09:13:40.320116 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h7zmq"] Dec 05 09:13:41 crc kubenswrapper[4863]: I1205 09:13:41.309692 4863 generic.go:334] "Generic (PLEG): container finished" podID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerID="b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e" exitCode=0 Dec 05 09:13:41 crc kubenswrapper[4863]: I1205 09:13:41.309767 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7zmq" event={"ID":"b930ebd1-d6df-48cd-a9bd-f5115d0f02da","Type":"ContainerDied","Data":"b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e"} Dec 05 09:13:41 crc kubenswrapper[4863]: I1205 09:13:41.311603 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7zmq" event={"ID":"b930ebd1-d6df-48cd-a9bd-f5115d0f02da","Type":"ContainerStarted","Data":"985a4d68326a70e61fa3b49d54371c337dfc7fd3d26414fa11d0e649ffe8e3a1"} Dec 05 09:13:41 crc kubenswrapper[4863]: I1205 09:13:41.311909 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:13:43 crc kubenswrapper[4863]: I1205 09:13:43.345323 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7zmq" event={"ID":"b930ebd1-d6df-48cd-a9bd-f5115d0f02da","Type":"ContainerStarted","Data":"ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0"} Dec 05 09:13:46 crc kubenswrapper[4863]: I1205 09:13:46.377430 4863 generic.go:334] "Generic (PLEG): container finished" podID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerID="ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0" exitCode=0 Dec 05 09:13:46 crc kubenswrapper[4863]: I1205 09:13:46.377502 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7zmq" event={"ID":"b930ebd1-d6df-48cd-a9bd-f5115d0f02da","Type":"ContainerDied","Data":"ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0"} Dec 05 09:13:47 crc kubenswrapper[4863]: I1205 09:13:47.389196 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7zmq" event={"ID":"b930ebd1-d6df-48cd-a9bd-f5115d0f02da","Type":"ContainerStarted","Data":"8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d"} Dec 05 09:13:47 crc kubenswrapper[4863]: I1205 09:13:47.413607 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h7zmq" podStartSLOduration=2.894639875 podStartE2EDuration="8.413586357s" podCreationTimestamp="2025-12-05 09:13:39 +0000 UTC" firstStartedPulling="2025-12-05 09:13:41.31163527 +0000 UTC m=+8849.037632310" lastFinishedPulling="2025-12-05 09:13:46.830581742 +0000 UTC m=+8854.556578792" observedRunningTime="2025-12-05 09:13:47.405725675 +0000 UTC m=+8855.131722735" watchObservedRunningTime="2025-12-05 09:13:47.413586357 +0000 UTC m=+8855.139583397" Dec 05 09:13:49 crc 
kubenswrapper[4863]: I1205 09:13:49.822443 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:49 crc kubenswrapper[4863]: I1205 09:13:49.822528 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:50 crc kubenswrapper[4863]: I1205 09:13:50.871848 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h7zmq" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="registry-server" probeResult="failure" output=< Dec 05 09:13:50 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 09:13:50 crc kubenswrapper[4863]: > Dec 05 09:13:59 crc kubenswrapper[4863]: I1205 09:13:59.874573 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:13:59 crc kubenswrapper[4863]: I1205 09:13:59.928442 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:14:00 crc kubenswrapper[4863]: I1205 09:14:00.113948 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h7zmq"] Dec 05 09:14:01 crc kubenswrapper[4863]: I1205 09:14:01.522725 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h7zmq" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="registry-server" containerID="cri-o://8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d" gracePeriod=2 Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.060729 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.175581 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-utilities\") pod \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.175651 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-catalog-content\") pod \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.175771 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwnp2\" (UniqueName: \"kubernetes.io/projected/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-kube-api-access-pwnp2\") pod \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\" (UID: \"b930ebd1-d6df-48cd-a9bd-f5115d0f02da\") " Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.176483 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-utilities" (OuterVolumeSpecName: "utilities") pod "b930ebd1-d6df-48cd-a9bd-f5115d0f02da" (UID: "b930ebd1-d6df-48cd-a9bd-f5115d0f02da"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.181614 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-kube-api-access-pwnp2" (OuterVolumeSpecName: "kube-api-access-pwnp2") pod "b930ebd1-d6df-48cd-a9bd-f5115d0f02da" (UID: "b930ebd1-d6df-48cd-a9bd-f5115d0f02da"). InnerVolumeSpecName "kube-api-access-pwnp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.278147 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwnp2\" (UniqueName: \"kubernetes.io/projected/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-kube-api-access-pwnp2\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.278184 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.290584 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b930ebd1-d6df-48cd-a9bd-f5115d0f02da" (UID: "b930ebd1-d6df-48cd-a9bd-f5115d0f02da"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.379908 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b930ebd1-d6df-48cd-a9bd-f5115d0f02da-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.534328 4863 generic.go:334] "Generic (PLEG): container finished" podID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerID="8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d" exitCode=0 Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.534379 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h7zmq" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.534384 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7zmq" event={"ID":"b930ebd1-d6df-48cd-a9bd-f5115d0f02da","Type":"ContainerDied","Data":"8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d"} Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.534427 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7zmq" event={"ID":"b930ebd1-d6df-48cd-a9bd-f5115d0f02da","Type":"ContainerDied","Data":"985a4d68326a70e61fa3b49d54371c337dfc7fd3d26414fa11d0e649ffe8e3a1"} Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.534447 4863 scope.go:117] "RemoveContainer" containerID="8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.559580 4863 scope.go:117] "RemoveContainer" containerID="ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.589030 4863 scope.go:117] "RemoveContainer" containerID="b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.600207 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h7zmq"] Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.617351 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h7zmq"] Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.645901 4863 scope.go:117] "RemoveContainer" containerID="8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d" Dec 05 09:14:02 crc kubenswrapper[4863]: E1205 09:14:02.646484 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d\": container with ID starting with 8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d not found: ID does not exist" containerID="8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.646522 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d"} err="failed to get container status \"8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d\": rpc error: code = NotFound desc = could not find container \"8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d\": container with ID starting with 8bd6c94d10d1c82a2a34161749f3308a6ec9cf97ec97cf697b6cc597cda8900d not found: ID does not exist" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.646550 4863 scope.go:117] "RemoveContainer" containerID="ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0" Dec 05 09:14:02 crc kubenswrapper[4863]: E1205 09:14:02.646935 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0\": container with ID starting with ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0 not found: ID does not exist" containerID="ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.646961 4863 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0"} err="failed to get container status \"ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0\": rpc error: code = NotFound desc = could not find container \"ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0\": container with ID starting with ac6920d91955ff401bac5bc9c2d201f61b9cb8f86c2c3d3da6655e39e0e5f0f0 not found: ID does not exist" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.646977 4863 scope.go:117] "RemoveContainer" containerID="b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e" Dec 05 09:14:02 crc kubenswrapper[4863]: E1205 09:14:02.647277 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e\": container with ID starting with b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e not found: ID does not exist" containerID="b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e" Dec 05 09:14:02 crc kubenswrapper[4863]: I1205 09:14:02.647305 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e"} err="failed to get container status \"b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e\": rpc error: code = NotFound desc = could not find container \"b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e\": container with ID starting with b1aa9c876600a014918b7e4c0263259936fa632c7d944e11a4b18faa65d6466e not found: ID does not exist" Dec 05 09:14:04 crc kubenswrapper[4863]: I1205 09:14:04.614546 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" path="/var/lib/kubelet/pods/b930ebd1-d6df-48cd-a9bd-f5115d0f02da/volumes" Dec 05 09:14:38 crc kubenswrapper[4863]: I1205 09:14:38.464830 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:14:38 crc kubenswrapper[4863]: I1205 09:14:38.465461 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.187356 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658"] Dec 05 09:15:00 crc kubenswrapper[4863]: E1205 09:15:00.188321 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="extract-content" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.188338 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="extract-content" Dec 05 09:15:00 crc kubenswrapper[4863]: E1205 09:15:00.188373 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="registry-server" 
Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.188380 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="registry-server" Dec 05 09:15:00 crc kubenswrapper[4863]: E1205 09:15:00.188404 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="extract-utilities" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.188411 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="extract-utilities" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.188706 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="b930ebd1-d6df-48cd-a9bd-f5115d0f02da" containerName="registry-server" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.189427 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.193246 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.193900 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.201371 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658"] Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.214065 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-secret-volume\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.214135 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-config-volume\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.214173 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgtnk\" (UniqueName: \"kubernetes.io/projected/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-kube-api-access-fgtnk\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.315841 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-secret-volume\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.315902 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-config-volume\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.315935 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgtnk\" (UniqueName: \"kubernetes.io/projected/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-kube-api-access-fgtnk\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.317286 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-config-volume\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.329402 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-secret-volume\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.342555 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgtnk\" (UniqueName: \"kubernetes.io/projected/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-kube-api-access-fgtnk\") pod \"collect-profiles-29415435-sp658\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.517342 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:00 crc kubenswrapper[4863]: I1205 09:15:00.998261 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658"] Dec 05 09:15:01 crc kubenswrapper[4863]: I1205 09:15:01.133580 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" event={"ID":"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b","Type":"ContainerStarted","Data":"c4434c526698ded60e0799fd374611fc155919fb598bd30cdb0aa76e0285588a"} Dec 05 09:15:02 crc kubenswrapper[4863]: I1205 09:15:02.145203 4863 generic.go:334] "Generic (PLEG): container finished" podID="83bd71d4-0e1e-4c18-af14-f66dd54f8f5b" containerID="c835aa0d8c2a7652003d24df84110e9b2d618752894073d332b77bbe9b17bb99" exitCode=0 Dec 05 09:15:02 crc kubenswrapper[4863]: I1205 09:15:02.145271 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" event={"ID":"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b","Type":"ContainerDied","Data":"c835aa0d8c2a7652003d24df84110e9b2d618752894073d332b77bbe9b17bb99"} Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.587959 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.680286 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-config-volume\") pod \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.680359 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-secret-volume\") pod \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.680430 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgtnk\" (UniqueName: \"kubernetes.io/projected/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-kube-api-access-fgtnk\") pod \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\" (UID: \"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b\") " Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.681737 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-config-volume" (OuterVolumeSpecName: "config-volume") pod "83bd71d4-0e1e-4c18-af14-f66dd54f8f5b" (UID: "83bd71d4-0e1e-4c18-af14-f66dd54f8f5b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.682780 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.688118 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-kube-api-access-fgtnk" (OuterVolumeSpecName: "kube-api-access-fgtnk") pod "83bd71d4-0e1e-4c18-af14-f66dd54f8f5b" (UID: "83bd71d4-0e1e-4c18-af14-f66dd54f8f5b"). InnerVolumeSpecName "kube-api-access-fgtnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.694611 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "83bd71d4-0e1e-4c18-af14-f66dd54f8f5b" (UID: "83bd71d4-0e1e-4c18-af14-f66dd54f8f5b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.785605 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:03 crc kubenswrapper[4863]: I1205 09:15:03.785637 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgtnk\" (UniqueName: \"kubernetes.io/projected/83bd71d4-0e1e-4c18-af14-f66dd54f8f5b-kube-api-access-fgtnk\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:04 crc kubenswrapper[4863]: I1205 09:15:04.164251 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" event={"ID":"83bd71d4-0e1e-4c18-af14-f66dd54f8f5b","Type":"ContainerDied","Data":"c4434c526698ded60e0799fd374611fc155919fb598bd30cdb0aa76e0285588a"} Dec 05 09:15:04 crc kubenswrapper[4863]: I1205 09:15:04.164281 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-sp658" Dec 05 09:15:04 crc kubenswrapper[4863]: I1205 09:15:04.164287 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4434c526698ded60e0799fd374611fc155919fb598bd30cdb0aa76e0285588a" Dec 05 09:15:04 crc kubenswrapper[4863]: I1205 09:15:04.661906 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz"] Dec 05 09:15:04 crc kubenswrapper[4863]: I1205 09:15:04.672218 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-wq8cz"] Dec 05 09:15:06 crc kubenswrapper[4863]: I1205 09:15:06.615029 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60" path="/var/lib/kubelet/pods/c1ac6ae6-f735-40f7-ae8c-1b5b4aa37b60/volumes" Dec 05 09:15:08 crc kubenswrapper[4863]: I1205 09:15:08.465445 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:15:08 crc kubenswrapper[4863]: I1205 09:15:08.466656 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:15:19 crc kubenswrapper[4863]: I1205 09:15:19.141563 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Dec 05 09:15:19 crc kubenswrapper[4863]: I1205 09:15:19.142321 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="970303aa-1c1f-4a60-9a92-a3d753caecef" containerName="adoption" containerID="cri-o://afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760" gracePeriod=30 Dec 05 09:15:38 crc kubenswrapper[4863]: I1205 09:15:38.464295 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:15:38 crc kubenswrapper[4863]: I1205 09:15:38.466790 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:15:38 crc kubenswrapper[4863]: I1205 09:15:38.467017 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 09:15:38 crc kubenswrapper[4863]: I1205 09:15:38.468338 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0813f46189acc4298f5e7eb3be8b9516f9a288f1fb3d5d36d56ef93c6916022a"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:15:38 crc kubenswrapper[4863]: I1205 09:15:38.468675 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://0813f46189acc4298f5e7eb3be8b9516f9a288f1fb3d5d36d56ef93c6916022a" gracePeriod=600 Dec 05 09:15:39 crc kubenswrapper[4863]: I1205 09:15:39.565001 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="0813f46189acc4298f5e7eb3be8b9516f9a288f1fb3d5d36d56ef93c6916022a" exitCode=0 Dec 05 09:15:39 crc kubenswrapper[4863]: I1205 09:15:39.565067 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"0813f46189acc4298f5e7eb3be8b9516f9a288f1fb3d5d36d56ef93c6916022a"} Dec 05 09:15:39 crc kubenswrapper[4863]: I1205 09:15:39.565645 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032"} Dec 05 09:15:39 crc kubenswrapper[4863]: I1205 09:15:39.565665 4863 scope.go:117] "RemoveContainer" containerID="deefea9a6d826649beb930d5ddf6d59c7f744b3333d13bf967cd9890225c9de6" Dec 05 09:15:40 crc kubenswrapper[4863]: I1205 09:15:40.934205 4863 scope.go:117] "RemoveContainer" containerID="c5f98559c6664b6fa1d9e785ae9cad40fa4c73adcc4438dbcf111dd55ca8e416" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.665086 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.673708 4863 generic.go:334] "Generic (PLEG): container finished" podID="970303aa-1c1f-4a60-9a92-a3d753caecef" containerID="afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760" exitCode=137 Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.673779 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"970303aa-1c1f-4a60-9a92-a3d753caecef","Type":"ContainerDied","Data":"afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760"} Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.673810 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"970303aa-1c1f-4a60-9a92-a3d753caecef","Type":"ContainerDied","Data":"bd1ae1f1255c2b004800fb316c92e33ab02cddbf5ba6b7229ea71688ca704226"} Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.673831 4863 scope.go:117] "RemoveContainer" containerID="afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.731014 4863 scope.go:117] "RemoveContainer" containerID="afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760" Dec 05 09:15:49 crc kubenswrapper[4863]: E1205 09:15:49.733375 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760\": container with ID starting with afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760 not found: ID does not exist" containerID="afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.733443 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760"} err="failed to get container status \"afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760\": rpc error: code = NotFound desc = could not find container \"afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760\": container with ID starting with afb56c3214d0e2bdbdddbc73d34ecf8db11c62432b1789f73779f3959bf62760 not found: ID does not exist" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.745074 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\") pod \"970303aa-1c1f-4a60-9a92-a3d753caecef\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.745183 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkhl8\" (UniqueName: \"kubernetes.io/projected/970303aa-1c1f-4a60-9a92-a3d753caecef-kube-api-access-xkhl8\") pod \"970303aa-1c1f-4a60-9a92-a3d753caecef\" (UID: \"970303aa-1c1f-4a60-9a92-a3d753caecef\") " Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.753555 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/970303aa-1c1f-4a60-9a92-a3d753caecef-kube-api-access-xkhl8" (OuterVolumeSpecName: "kube-api-access-xkhl8") pod "970303aa-1c1f-4a60-9a92-a3d753caecef" (UID: "970303aa-1c1f-4a60-9a92-a3d753caecef"). InnerVolumeSpecName "kube-api-access-xkhl8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.764947 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97" (OuterVolumeSpecName: "mariadb-data") pod "970303aa-1c1f-4a60-9a92-a3d753caecef" (UID: "970303aa-1c1f-4a60-9a92-a3d753caecef"). InnerVolumeSpecName "pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.847318 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\") on node \"crc\" " Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.847361 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkhl8\" (UniqueName: \"kubernetes.io/projected/970303aa-1c1f-4a60-9a92-a3d753caecef-kube-api-access-xkhl8\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.877830 4863 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.878747 4863 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97") on node "crc" Dec 05 09:15:49 crc kubenswrapper[4863]: I1205 09:15:49.948938 4863 reconciler_common.go:293] "Volume detached for volume \"pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0dc77b11-2f93-4c91-a0e2-78b2b9245e97\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:50 crc kubenswrapper[4863]: I1205 09:15:50.684701 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Dec 05 09:15:50 crc kubenswrapper[4863]: I1205 09:15:50.709231 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Dec 05 09:15:50 crc kubenswrapper[4863]: I1205 09:15:50.719778 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Dec 05 09:15:51 crc kubenswrapper[4863]: I1205 09:15:51.304180 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Dec 05 09:15:51 crc kubenswrapper[4863]: I1205 09:15:51.304407 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="4e62a179-0048-43a4-943b-6f43455f44d1" containerName="adoption" containerID="cri-o://382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642" gracePeriod=30 Dec 05 09:15:52 crc kubenswrapper[4863]: I1205 09:15:52.614343 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="970303aa-1c1f-4a60-9a92-a3d753caecef" path="/var/lib/kubelet/pods/970303aa-1c1f-4a60-9a92-a3d753caecef/volumes" Dec 05 09:16:18 crc kubenswrapper[4863]: I1205 09:16:18.933451 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wc692"] Dec 05 09:16:18 crc kubenswrapper[4863]: E1205 09:16:18.934937 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="970303aa-1c1f-4a60-9a92-a3d753caecef" containerName="adoption" Dec 05 09:16:18 crc kubenswrapper[4863]: I1205 09:16:18.934957 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="970303aa-1c1f-4a60-9a92-a3d753caecef" containerName="adoption" Dec 05 09:16:18 crc kubenswrapper[4863]: E1205 09:16:18.934982 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83bd71d4-0e1e-4c18-af14-f66dd54f8f5b" containerName="collect-profiles" Dec 05 09:16:18 crc kubenswrapper[4863]: I1205 09:16:18.934989 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="83bd71d4-0e1e-4c18-af14-f66dd54f8f5b" containerName="collect-profiles" Dec 05 09:16:18 crc kubenswrapper[4863]: I1205 09:16:18.935257 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="83bd71d4-0e1e-4c18-af14-f66dd54f8f5b" containerName="collect-profiles" Dec 05 09:16:18 crc kubenswrapper[4863]: I1205 09:16:18.935272 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="970303aa-1c1f-4a60-9a92-a3d753caecef" containerName="adoption" Dec 05 09:16:18 crc kubenswrapper[4863]: I1205 09:16:18.937277 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:18 crc kubenswrapper[4863]: I1205 09:16:18.955733 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wc692"] Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.121931 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-utilities\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.122261 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkk6f\" (UniqueName: \"kubernetes.io/projected/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-kube-api-access-zkk6f\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.122635 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-catalog-content\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.223796 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-catalog-content\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.223886 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-utilities\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.223913 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkk6f\" (UniqueName: \"kubernetes.io/projected/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-kube-api-access-zkk6f\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.224501 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-catalog-content\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.224563 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-utilities\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.247660 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zkk6f\" (UniqueName: \"kubernetes.io/projected/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-kube-api-access-zkk6f\") pod \"community-operators-wc692\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.276980 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.871459 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wc692"] Dec 05 09:16:19 crc kubenswrapper[4863]: I1205 09:16:19.981298 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc692" event={"ID":"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2","Type":"ContainerStarted","Data":"54ceb2f6696e7fc68f191ee6b9b0ddc2851a0f5b0d1434aa7b4871a3a94902d0"} Dec 05 09:16:20 crc kubenswrapper[4863]: I1205 09:16:20.991329 4863 generic.go:334] "Generic (PLEG): container finished" podID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerID="dfccdd940afe35fdfcc9fb02f349af15d8f26649926cdd00f856850906ba67be" exitCode=0 Dec 05 09:16:20 crc kubenswrapper[4863]: I1205 09:16:20.991386 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc692" event={"ID":"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2","Type":"ContainerDied","Data":"dfccdd940afe35fdfcc9fb02f349af15d8f26649926cdd00f856850906ba67be"} Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:21.858583 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:21.980244 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zprxh\" (UniqueName: \"kubernetes.io/projected/4e62a179-0048-43a4-943b-6f43455f44d1-kube-api-access-zprxh\") pod \"4e62a179-0048-43a4-943b-6f43455f44d1\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:21.983742 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\") pod \"4e62a179-0048-43a4-943b-6f43455f44d1\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:21.983808 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4e62a179-0048-43a4-943b-6f43455f44d1-ovn-data-cert\") pod \"4e62a179-0048-43a4-943b-6f43455f44d1\" (UID: \"4e62a179-0048-43a4-943b-6f43455f44d1\") " Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:21.988341 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e62a179-0048-43a4-943b-6f43455f44d1-kube-api-access-zprxh" (OuterVolumeSpecName: "kube-api-access-zprxh") pod "4e62a179-0048-43a4-943b-6f43455f44d1" (UID: "4e62a179-0048-43a4-943b-6f43455f44d1"). InnerVolumeSpecName "kube-api-access-zprxh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:21.990085 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e62a179-0048-43a4-943b-6f43455f44d1-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "4e62a179-0048-43a4-943b-6f43455f44d1" (UID: "4e62a179-0048-43a4-943b-6f43455f44d1"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.021185 4863 generic.go:334] "Generic (PLEG): container finished" podID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerID="75143c7e7c7dc9f9def564aa75a8ef7fbffa905e9646d6a42bc7abbde60cfb67" exitCode=0 Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.021624 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184" (OuterVolumeSpecName: "ovn-data") pod "4e62a179-0048-43a4-943b-6f43455f44d1" (UID: "4e62a179-0048-43a4-943b-6f43455f44d1"). InnerVolumeSpecName "pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.021664 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc692" event={"ID":"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2","Type":"ContainerDied","Data":"75143c7e7c7dc9f9def564aa75a8ef7fbffa905e9646d6a42bc7abbde60cfb67"} Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.024021 4863 generic.go:334] "Generic (PLEG): container finished" podID="4e62a179-0048-43a4-943b-6f43455f44d1" containerID="382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642" exitCode=137 Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.024058 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"4e62a179-0048-43a4-943b-6f43455f44d1","Type":"ContainerDied","Data":"382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642"} Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.024086 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"4e62a179-0048-43a4-943b-6f43455f44d1","Type":"ContainerDied","Data":"80b5717d5dc1da4eaf8ab2a702231a159a093469d22fa5197ef6e04de3352aa2"} Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.024121 4863 scope.go:117] "RemoveContainer" containerID="382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.024301 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.086601 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zprxh\" (UniqueName: \"kubernetes.io/projected/4e62a179-0048-43a4-943b-6f43455f44d1-kube-api-access-zprxh\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.086708 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\") on node \"crc\" " Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.086756 4863 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/4e62a179-0048-43a4-943b-6f43455f44d1-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.123032 4863 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.123335 4863 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184") on node "crc" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.125593 4863 scope.go:117] "RemoveContainer" containerID="382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642" Dec 05 09:16:22 crc kubenswrapper[4863]: E1205 09:16:22.127453 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642\": container with ID starting with 382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642 not found: ID does not exist" containerID="382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.127555 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642"} err="failed to get container status \"382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642\": rpc error: code = NotFound desc = could not find container \"382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642\": container with ID starting with 382ded45bf308b47fce77e1acdaf7f50d1c297e175b59d908fae8d8d6b85f642 not found: ID does not exist" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.141304 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.151257 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.188833 4863 reconciler_common.go:293] "Volume detached for volume \"pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5d1b90c-1a4a-4374-ad8f-cf728c6cb184\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:22 crc kubenswrapper[4863]: I1205 09:16:22.613448 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e62a179-0048-43a4-943b-6f43455f44d1" path="/var/lib/kubelet/pods/4e62a179-0048-43a4-943b-6f43455f44d1/volumes" Dec 05 09:16:23 crc kubenswrapper[4863]: I1205 
09:16:23.033910 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc692" event={"ID":"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2","Type":"ContainerStarted","Data":"1fb089dbf748b895a0be150b6777e5f0ceca429caf7ba6518531469b460921be"} Dec 05 09:16:23 crc kubenswrapper[4863]: I1205 09:16:23.054029 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wc692" podStartSLOduration=3.6056124819999997 podStartE2EDuration="5.054012516s" podCreationTimestamp="2025-12-05 09:16:18 +0000 UTC" firstStartedPulling="2025-12-05 09:16:20.993384883 +0000 UTC m=+9008.719381923" lastFinishedPulling="2025-12-05 09:16:22.441784917 +0000 UTC m=+9010.167781957" observedRunningTime="2025-12-05 09:16:23.048911582 +0000 UTC m=+9010.774908632" watchObservedRunningTime="2025-12-05 09:16:23.054012516 +0000 UTC m=+9010.780009556" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.112235 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cvw9h"] Dec 05 09:16:24 crc kubenswrapper[4863]: E1205 09:16:24.112954 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e62a179-0048-43a4-943b-6f43455f44d1" containerName="adoption" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.112967 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e62a179-0048-43a4-943b-6f43455f44d1" containerName="adoption" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.113218 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e62a179-0048-43a4-943b-6f43455f44d1" containerName="adoption" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.114678 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.125786 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cvw9h"] Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.228304 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-utilities\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.228389 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfbdr\" (UniqueName: \"kubernetes.io/projected/a5456710-2f90-4969-9812-d6f206ba2890-kube-api-access-rfbdr\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.228414 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-catalog-content\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.330825 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfbdr\" (UniqueName: \"kubernetes.io/projected/a5456710-2f90-4969-9812-d6f206ba2890-kube-api-access-rfbdr\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.330879 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-catalog-content\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.331107 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-utilities\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.331462 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-catalog-content\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.331543 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-utilities\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.352943 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rfbdr\" (UniqueName: \"kubernetes.io/projected/a5456710-2f90-4969-9812-d6f206ba2890-kube-api-access-rfbdr\") pod \"certified-operators-cvw9h\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.443306 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:24 crc kubenswrapper[4863]: I1205 09:16:24.960234 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cvw9h"] Dec 05 09:16:24 crc kubenswrapper[4863]: W1205 09:16:24.967584 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5456710_2f90_4969_9812_d6f206ba2890.slice/crio-6d550e83ae3274c19443f82187dde096f51ba5eae843c9d07400dd660807af55 WatchSource:0}: Error finding container 6d550e83ae3274c19443f82187dde096f51ba5eae843c9d07400dd660807af55: Status 404 returned error can't find the container with id 6d550e83ae3274c19443f82187dde096f51ba5eae843c9d07400dd660807af55 Dec 05 09:16:25 crc kubenswrapper[4863]: I1205 09:16:25.052253 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvw9h" event={"ID":"a5456710-2f90-4969-9812-d6f206ba2890","Type":"ContainerStarted","Data":"6d550e83ae3274c19443f82187dde096f51ba5eae843c9d07400dd660807af55"} Dec 05 09:16:26 crc kubenswrapper[4863]: I1205 09:16:26.063912 4863 generic.go:334] "Generic (PLEG): container finished" podID="a5456710-2f90-4969-9812-d6f206ba2890" containerID="c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f" exitCode=0 Dec 05 09:16:26 crc kubenswrapper[4863]: I1205 09:16:26.064005 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvw9h" event={"ID":"a5456710-2f90-4969-9812-d6f206ba2890","Type":"ContainerDied","Data":"c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f"} Dec 05 09:16:27 crc kubenswrapper[4863]: I1205 09:16:27.076944 4863 generic.go:334] "Generic (PLEG): container finished" podID="a5456710-2f90-4969-9812-d6f206ba2890" containerID="b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453" exitCode=0 Dec 05 09:16:27 crc kubenswrapper[4863]: I1205 09:16:27.077055 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvw9h" event={"ID":"a5456710-2f90-4969-9812-d6f206ba2890","Type":"ContainerDied","Data":"b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453"} Dec 05 09:16:29 crc kubenswrapper[4863]: I1205 09:16:29.100542 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvw9h" event={"ID":"a5456710-2f90-4969-9812-d6f206ba2890","Type":"ContainerStarted","Data":"eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e"} Dec 05 09:16:29 crc kubenswrapper[4863]: I1205 09:16:29.125367 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cvw9h" podStartSLOduration=3.17545109 podStartE2EDuration="5.125333535s" podCreationTimestamp="2025-12-05 09:16:24 +0000 UTC" firstStartedPulling="2025-12-05 09:16:26.066126456 +0000 UTC m=+9013.792123496" lastFinishedPulling="2025-12-05 09:16:28.016008891 +0000 UTC m=+9015.742005941" observedRunningTime="2025-12-05 09:16:29.12104315 +0000 UTC 
m=+9016.847040180" watchObservedRunningTime="2025-12-05 09:16:29.125333535 +0000 UTC m=+9016.851330575" Dec 05 09:16:29 crc kubenswrapper[4863]: I1205 09:16:29.277920 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:29 crc kubenswrapper[4863]: I1205 09:16:29.277975 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:29 crc kubenswrapper[4863]: I1205 09:16:29.329797 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:30 crc kubenswrapper[4863]: I1205 09:16:30.625834 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:30 crc kubenswrapper[4863]: I1205 09:16:30.907190 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wc692"] Dec 05 09:16:32 crc kubenswrapper[4863]: I1205 09:16:32.590908 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wc692" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="registry-server" containerID="cri-o://1fb089dbf748b895a0be150b6777e5f0ceca429caf7ba6518531469b460921be" gracePeriod=2 Dec 05 09:16:33 crc kubenswrapper[4863]: I1205 09:16:33.603365 4863 generic.go:334] "Generic (PLEG): container finished" podID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerID="1fb089dbf748b895a0be150b6777e5f0ceca429caf7ba6518531469b460921be" exitCode=0 Dec 05 09:16:33 crc kubenswrapper[4863]: I1205 09:16:33.603403 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc692" event={"ID":"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2","Type":"ContainerDied","Data":"1fb089dbf748b895a0be150b6777e5f0ceca429caf7ba6518531469b460921be"} Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.260658 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.368736 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-utilities\") pod \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.369113 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-catalog-content\") pod \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.369181 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkk6f\" (UniqueName: \"kubernetes.io/projected/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-kube-api-access-zkk6f\") pod \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\" (UID: \"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2\") " Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.371396 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-utilities" (OuterVolumeSpecName: "utilities") pod "ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" (UID: "ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.376215 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-kube-api-access-zkk6f" (OuterVolumeSpecName: "kube-api-access-zkk6f") pod "ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" (UID: "ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2"). InnerVolumeSpecName "kube-api-access-zkk6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.426127 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" (UID: "ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.444119 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.444170 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.472012 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkk6f\" (UniqueName: \"kubernetes.io/projected/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-kube-api-access-zkk6f\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.472040 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.472049 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.487898 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.617539 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wc692" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.625109 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc692" event={"ID":"ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2","Type":"ContainerDied","Data":"54ceb2f6696e7fc68f191ee6b9b0ddc2851a0f5b0d1434aa7b4871a3a94902d0"} Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.625288 4863 scope.go:117] "RemoveContainer" containerID="1fb089dbf748b895a0be150b6777e5f0ceca429caf7ba6518531469b460921be" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.650640 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wc692"] Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.653564 4863 scope.go:117] "RemoveContainer" containerID="75143c7e7c7dc9f9def564aa75a8ef7fbffa905e9646d6a42bc7abbde60cfb67" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.660345 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wc692"] Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.674131 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:34 crc kubenswrapper[4863]: I1205 09:16:34.675255 4863 scope.go:117] "RemoveContainer" containerID="dfccdd940afe35fdfcc9fb02f349af15d8f26649926cdd00f856850906ba67be" Dec 05 09:16:36 crc kubenswrapper[4863]: I1205 09:16:36.309138 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cvw9h"] Dec 05 09:16:36 crc kubenswrapper[4863]: I1205 09:16:36.614669 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" path="/var/lib/kubelet/pods/ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2/volumes" Dec 05 09:16:36 crc kubenswrapper[4863]: I1205 09:16:36.635133 4863 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/certified-operators-cvw9h" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="registry-server" containerID="cri-o://eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e" gracePeriod=2 Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.119987 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.229837 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-utilities\") pod \"a5456710-2f90-4969-9812-d6f206ba2890\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.229896 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfbdr\" (UniqueName: \"kubernetes.io/projected/a5456710-2f90-4969-9812-d6f206ba2890-kube-api-access-rfbdr\") pod \"a5456710-2f90-4969-9812-d6f206ba2890\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.229949 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-catalog-content\") pod \"a5456710-2f90-4969-9812-d6f206ba2890\" (UID: \"a5456710-2f90-4969-9812-d6f206ba2890\") " Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.230927 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-utilities" (OuterVolumeSpecName: "utilities") pod "a5456710-2f90-4969-9812-d6f206ba2890" (UID: "a5456710-2f90-4969-9812-d6f206ba2890"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.237530 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5456710-2f90-4969-9812-d6f206ba2890-kube-api-access-rfbdr" (OuterVolumeSpecName: "kube-api-access-rfbdr") pod "a5456710-2f90-4969-9812-d6f206ba2890" (UID: "a5456710-2f90-4969-9812-d6f206ba2890"). InnerVolumeSpecName "kube-api-access-rfbdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.283004 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a5456710-2f90-4969-9812-d6f206ba2890" (UID: "a5456710-2f90-4969-9812-d6f206ba2890"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.337653 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.337722 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfbdr\" (UniqueName: \"kubernetes.io/projected/a5456710-2f90-4969-9812-d6f206ba2890-kube-api-access-rfbdr\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.337742 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5456710-2f90-4969-9812-d6f206ba2890-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.646560 4863 generic.go:334] "Generic (PLEG): container finished" podID="a5456710-2f90-4969-9812-d6f206ba2890" containerID="eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e" exitCode=0 Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.646631 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cvw9h" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.646654 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvw9h" event={"ID":"a5456710-2f90-4969-9812-d6f206ba2890","Type":"ContainerDied","Data":"eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e"} Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.647062 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvw9h" event={"ID":"a5456710-2f90-4969-9812-d6f206ba2890","Type":"ContainerDied","Data":"6d550e83ae3274c19443f82187dde096f51ba5eae843c9d07400dd660807af55"} Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.647107 4863 scope.go:117] "RemoveContainer" containerID="eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.668524 4863 scope.go:117] "RemoveContainer" containerID="b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.691440 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cvw9h"] Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.701383 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cvw9h"] Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.719401 4863 scope.go:117] "RemoveContainer" containerID="c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.742666 4863 scope.go:117] "RemoveContainer" containerID="eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e" Dec 05 09:16:37 crc kubenswrapper[4863]: E1205 09:16:37.743245 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e\": container with ID starting with eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e not found: ID does not exist" containerID="eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.743334 
4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e"} err="failed to get container status \"eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e\": rpc error: code = NotFound desc = could not find container \"eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e\": container with ID starting with eff42b87916a304af03596f50d2c79152dadad8a1dfe6d04de4a77b22978686e not found: ID does not exist" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.743390 4863 scope.go:117] "RemoveContainer" containerID="b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453" Dec 05 09:16:37 crc kubenswrapper[4863]: E1205 09:16:37.743995 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453\": container with ID starting with b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453 not found: ID does not exist" containerID="b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.744024 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453"} err="failed to get container status \"b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453\": rpc error: code = NotFound desc = could not find container \"b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453\": container with ID starting with b974a57c5cf0f21fabf79e5de239b7b3fa3720803d1dc1131403ed4ca60df453 not found: ID does not exist" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.744038 4863 scope.go:117] "RemoveContainer" containerID="c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f" Dec 05 09:16:37 crc kubenswrapper[4863]: E1205 09:16:37.744489 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f\": container with ID starting with c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f not found: ID does not exist" containerID="c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f" Dec 05 09:16:37 crc kubenswrapper[4863]: I1205 09:16:37.744516 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f"} err="failed to get container status \"c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f\": rpc error: code = NotFound desc = could not find container \"c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f\": container with ID starting with c5e5c8c14c08da908e92237012bbbb3d64cc7f2fe9f28f931182324b7cb2445f not found: ID does not exist" Dec 05 09:16:38 crc kubenswrapper[4863]: I1205 09:16:38.634789 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5456710-2f90-4969-9812-d6f206ba2890" path="/var/lib/kubelet/pods/a5456710-2f90-4969-9812-d6f206ba2890/volumes" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.197765 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 09:16:42 crc kubenswrapper[4863]: E1205 09:16:42.198884 4863 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="extract-content" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.198902 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="extract-content" Dec 05 09:16:42 crc kubenswrapper[4863]: E1205 09:16:42.198922 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="registry-server" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.198932 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="registry-server" Dec 05 09:16:42 crc kubenswrapper[4863]: E1205 09:16:42.198945 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="extract-utilities" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.198953 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="extract-utilities" Dec 05 09:16:42 crc kubenswrapper[4863]: E1205 09:16:42.198969 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="extract-utilities" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.198977 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="extract-utilities" Dec 05 09:16:42 crc kubenswrapper[4863]: E1205 09:16:42.199009 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="extract-content" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.199016 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="extract-content" Dec 05 09:16:42 crc kubenswrapper[4863]: E1205 09:16:42.199034 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="registry-server" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.199040 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="registry-server" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.199259 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5456710-2f90-4969-9812-d6f206ba2890" containerName="registry-server" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.199297 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad85e368-e96b-4e6e-a2e9-a56ceff6f2e2" containerName="registry-server" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.200118 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.202257 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.203624 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.204669 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jh4nh" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.205426 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.210311 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334517 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334573 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334617 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2njt5\" (UniqueName: \"kubernetes.io/projected/fefafcc8-4071-43dd-8d8e-c40f20901efc-kube-api-access-2njt5\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334660 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334690 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334713 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334730 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334776 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.334817 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-config-data\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.436680 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-config-data\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.436752 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.436814 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.436861 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2njt5\" (UniqueName: \"kubernetes.io/projected/fefafcc8-4071-43dd-8d8e-c40f20901efc-kube-api-access-2njt5\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.436914 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.436955 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.436984 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ssh-key\") pod 
\"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.437008 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.437070 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.437687 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.437769 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.437905 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.438527 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-config-data\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.438607 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.580094 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.580220 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc 
kubenswrapper[4863]: I1205 09:16:42.582134 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.583396 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2njt5\" (UniqueName: \"kubernetes.io/projected/fefafcc8-4071-43dd-8d8e-c40f20901efc-kube-api-access-2njt5\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.612013 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " pod="openstack/tempest-tests-tempest" Dec 05 09:16:42 crc kubenswrapper[4863]: I1205 09:16:42.823324 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 09:16:43 crc kubenswrapper[4863]: I1205 09:16:43.300136 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 09:16:43 crc kubenswrapper[4863]: I1205 09:16:43.709603 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"fefafcc8-4071-43dd-8d8e-c40f20901efc","Type":"ContainerStarted","Data":"94e27a55d7933b0d7e596b281cbcddba53f29c0223548afd32084a7fae6a0486"} Dec 05 09:17:29 crc kubenswrapper[4863]: E1205 09:17:29.310392 4863 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:65066e8ca260a75886ae57f157049605" Dec 05 09:17:29 crc kubenswrapper[4863]: E1205 09:17:29.310976 4863 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:65066e8ca260a75886ae57f157049605" Dec 05 09:17:29 crc kubenswrapper[4863]: E1205 09:17:29.311107 4863 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:65066e8ca260a75886ae57f157049605,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2njt5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(fefafcc8-4071-43dd-8d8e-c40f20901efc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 09:17:29 crc kubenswrapper[4863]: E1205 09:17:29.312460 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="fefafcc8-4071-43dd-8d8e-c40f20901efc" Dec 05 09:17:30 crc kubenswrapper[4863]: E1205 09:17:30.632907 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:65066e8ca260a75886ae57f157049605\\\"\"" pod="openstack/tempest-tests-tempest" podUID="fefafcc8-4071-43dd-8d8e-c40f20901efc" Dec 05 09:17:38 crc kubenswrapper[4863]: I1205 09:17:38.464756 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:17:38 crc kubenswrapper[4863]: I1205 09:17:38.465318 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:17:40 crc kubenswrapper[4863]: I1205 09:17:40.263713 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="metallb-system/metallb-operator-webhook-server-855bbdbb8d-n4cpf" podUID="5157c01b-e8a3-4f95-8f36-a41e0faa358b" containerName="webhook-server" probeResult="failure" output="Get \"http://10.217.0.49:7472/metrics\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 09:17:45 crc kubenswrapper[4863]: I1205 09:17:45.602615 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 09:17:46 crc kubenswrapper[4863]: I1205 09:17:46.779347 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"fefafcc8-4071-43dd-8d8e-c40f20901efc","Type":"ContainerStarted","Data":"b2e70e7c5a40bbb0274557bd4dc1377938b34408384ad71aabba149eb0a4489c"} Dec 05 09:17:46 crc kubenswrapper[4863]: I1205 09:17:46.795666 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.502541811 podStartE2EDuration="1m5.79564453s" podCreationTimestamp="2025-12-05 09:16:41 +0000 UTC" firstStartedPulling="2025-12-05 09:16:43.306170459 +0000 UTC m=+9031.032167499" lastFinishedPulling="2025-12-05 09:17:45.599273178 +0000 UTC m=+9093.325270218" observedRunningTime="2025-12-05 09:17:46.792912554 +0000 UTC m=+9094.518909594" watchObservedRunningTime="2025-12-05 09:17:46.79564453 +0000 UTC m=+9094.521641570" Dec 05 09:18:08 crc kubenswrapper[4863]: I1205 09:18:08.464511 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:18:08 crc kubenswrapper[4863]: I1205 09:18:08.465064 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:18:38 crc kubenswrapper[4863]: I1205 
09:18:38.464027 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:18:38 crc kubenswrapper[4863]: I1205 09:18:38.464607 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:18:38 crc kubenswrapper[4863]: I1205 09:18:38.464653 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 09:18:38 crc kubenswrapper[4863]: I1205 09:18:38.465493 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:18:38 crc kubenswrapper[4863]: I1205 09:18:38.465552 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" gracePeriod=600 Dec 05 09:18:38 crc kubenswrapper[4863]: E1205 09:18:38.591360 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:18:39 crc kubenswrapper[4863]: I1205 09:18:39.367369 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" exitCode=0 Dec 05 09:18:39 crc kubenswrapper[4863]: I1205 09:18:39.367443 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032"} Dec 05 09:18:39 crc kubenswrapper[4863]: I1205 09:18:39.367736 4863 scope.go:117] "RemoveContainer" containerID="0813f46189acc4298f5e7eb3be8b9516f9a288f1fb3d5d36d56ef93c6916022a" Dec 05 09:18:39 crc kubenswrapper[4863]: I1205 09:18:39.368581 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:18:39 crc kubenswrapper[4863]: E1205 09:18:39.369078 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:18:49 crc kubenswrapper[4863]: I1205 09:18:49.603193 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:18:49 crc kubenswrapper[4863]: E1205 09:18:49.604122 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:19:04 crc kubenswrapper[4863]: I1205 09:19:04.602113 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:19:04 crc kubenswrapper[4863]: E1205 09:19:04.602936 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:19:15 crc kubenswrapper[4863]: I1205 09:19:15.602029 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:19:15 crc kubenswrapper[4863]: E1205 09:19:15.603033 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:19:28 crc kubenswrapper[4863]: I1205 09:19:28.603231 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:19:28 crc kubenswrapper[4863]: E1205 09:19:28.604152 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:19:42 crc kubenswrapper[4863]: I1205 09:19:42.609879 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:19:42 crc kubenswrapper[4863]: E1205 09:19:42.611032 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:19:57 crc kubenswrapper[4863]: I1205 09:19:57.603095 4863 
scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:19:57 crc kubenswrapper[4863]: E1205 09:19:57.604975 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:20:10 crc kubenswrapper[4863]: I1205 09:20:10.602284 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:20:10 crc kubenswrapper[4863]: E1205 09:20:10.602974 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:20:22 crc kubenswrapper[4863]: I1205 09:20:22.616655 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:20:22 crc kubenswrapper[4863]: E1205 09:20:22.618339 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:20:37 crc kubenswrapper[4863]: I1205 09:20:37.602854 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:20:37 crc kubenswrapper[4863]: E1205 09:20:37.603618 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:20:49 crc kubenswrapper[4863]: I1205 09:20:49.602137 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:20:49 crc kubenswrapper[4863]: E1205 09:20:49.603120 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:21:02 crc kubenswrapper[4863]: I1205 09:21:02.613030 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:21:02 crc kubenswrapper[4863]: E1205 09:21:02.613823 4863 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:21:16 crc kubenswrapper[4863]: I1205 09:21:16.602294 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:21:16 crc kubenswrapper[4863]: E1205 09:21:16.603355 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:21:27 crc kubenswrapper[4863]: I1205 09:21:27.601863 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:21:27 crc kubenswrapper[4863]: E1205 09:21:27.602533 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:21:41 crc kubenswrapper[4863]: I1205 09:21:41.603127 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:21:41 crc kubenswrapper[4863]: E1205 09:21:41.603949 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.143551 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dszvd"] Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.146199 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.156788 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dszvd"] Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.222582 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-utilities\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.222756 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-catalog-content\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.222826 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frzrb\" (UniqueName: \"kubernetes.io/projected/ba41dd5d-1717-4805-b69c-c20061e76b5c-kube-api-access-frzrb\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.324165 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-catalog-content\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.324277 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frzrb\" (UniqueName: \"kubernetes.io/projected/ba41dd5d-1717-4805-b69c-c20061e76b5c-kube-api-access-frzrb\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.324373 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-utilities\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.324719 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-catalog-content\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.324776 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-utilities\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.356383 4863 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-frzrb\" (UniqueName: \"kubernetes.io/projected/ba41dd5d-1717-4805-b69c-c20061e76b5c-kube-api-access-frzrb\") pod \"redhat-marketplace-dszvd\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:42 crc kubenswrapper[4863]: I1205 09:21:42.478997 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:43 crc kubenswrapper[4863]: I1205 09:21:43.194853 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dszvd"] Dec 05 09:21:43 crc kubenswrapper[4863]: I1205 09:21:43.218628 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dszvd" event={"ID":"ba41dd5d-1717-4805-b69c-c20061e76b5c","Type":"ContainerStarted","Data":"01c7a0d081530ed53452aae7733ac74b438e07aac6565cae78b233d2776f99ab"} Dec 05 09:21:44 crc kubenswrapper[4863]: I1205 09:21:44.229580 4863 generic.go:334] "Generic (PLEG): container finished" podID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerID="cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496" exitCode=0 Dec 05 09:21:44 crc kubenswrapper[4863]: I1205 09:21:44.229662 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dszvd" event={"ID":"ba41dd5d-1717-4805-b69c-c20061e76b5c","Type":"ContainerDied","Data":"cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496"} Dec 05 09:21:44 crc kubenswrapper[4863]: I1205 09:21:44.232185 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:21:46 crc kubenswrapper[4863]: I1205 09:21:46.251528 4863 generic.go:334] "Generic (PLEG): container finished" podID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerID="ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183" exitCode=0 Dec 05 09:21:46 crc kubenswrapper[4863]: I1205 09:21:46.251622 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dszvd" event={"ID":"ba41dd5d-1717-4805-b69c-c20061e76b5c","Type":"ContainerDied","Data":"ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183"} Dec 05 09:21:48 crc kubenswrapper[4863]: I1205 09:21:48.274369 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dszvd" event={"ID":"ba41dd5d-1717-4805-b69c-c20061e76b5c","Type":"ContainerStarted","Data":"40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e"} Dec 05 09:21:48 crc kubenswrapper[4863]: I1205 09:21:48.300187 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dszvd" podStartSLOduration=3.709079871 podStartE2EDuration="6.300169388s" podCreationTimestamp="2025-12-05 09:21:42 +0000 UTC" firstStartedPulling="2025-12-05 09:21:44.231984435 +0000 UTC m=+9331.957981475" lastFinishedPulling="2025-12-05 09:21:46.823073952 +0000 UTC m=+9334.549070992" observedRunningTime="2025-12-05 09:21:48.292382818 +0000 UTC m=+9336.018379858" watchObservedRunningTime="2025-12-05 09:21:48.300169388 +0000 UTC m=+9336.026166428" Dec 05 09:21:52 crc kubenswrapper[4863]: I1205 09:21:52.479883 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:52 crc kubenswrapper[4863]: I1205 09:21:52.480532 4863 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:52 crc kubenswrapper[4863]: I1205 09:21:52.533026 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:53 crc kubenswrapper[4863]: I1205 09:21:53.370014 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:53 crc kubenswrapper[4863]: I1205 09:21:53.443134 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dszvd"] Dec 05 09:21:54 crc kubenswrapper[4863]: I1205 09:21:54.602309 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:21:54 crc kubenswrapper[4863]: E1205 09:21:54.602989 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:21:55 crc kubenswrapper[4863]: I1205 09:21:55.338787 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dszvd" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="registry-server" containerID="cri-o://40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e" gracePeriod=2 Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.199748 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.311925 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frzrb\" (UniqueName: \"kubernetes.io/projected/ba41dd5d-1717-4805-b69c-c20061e76b5c-kube-api-access-frzrb\") pod \"ba41dd5d-1717-4805-b69c-c20061e76b5c\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.312047 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-catalog-content\") pod \"ba41dd5d-1717-4805-b69c-c20061e76b5c\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.312116 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-utilities\") pod \"ba41dd5d-1717-4805-b69c-c20061e76b5c\" (UID: \"ba41dd5d-1717-4805-b69c-c20061e76b5c\") " Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.314061 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-utilities" (OuterVolumeSpecName: "utilities") pod "ba41dd5d-1717-4805-b69c-c20061e76b5c" (UID: "ba41dd5d-1717-4805-b69c-c20061e76b5c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.324930 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba41dd5d-1717-4805-b69c-c20061e76b5c-kube-api-access-frzrb" (OuterVolumeSpecName: "kube-api-access-frzrb") pod "ba41dd5d-1717-4805-b69c-c20061e76b5c" (UID: "ba41dd5d-1717-4805-b69c-c20061e76b5c"). InnerVolumeSpecName "kube-api-access-frzrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.335806 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba41dd5d-1717-4805-b69c-c20061e76b5c" (UID: "ba41dd5d-1717-4805-b69c-c20061e76b5c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.352688 4863 generic.go:334] "Generic (PLEG): container finished" podID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerID="40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e" exitCode=0 Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.352735 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dszvd" event={"ID":"ba41dd5d-1717-4805-b69c-c20061e76b5c","Type":"ContainerDied","Data":"40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e"} Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.352763 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dszvd" event={"ID":"ba41dd5d-1717-4805-b69c-c20061e76b5c","Type":"ContainerDied","Data":"01c7a0d081530ed53452aae7733ac74b438e07aac6565cae78b233d2776f99ab"} Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.352780 4863 scope.go:117] "RemoveContainer" containerID="40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.352921 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dszvd" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.408696 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dszvd"] Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.412283 4863 scope.go:117] "RemoveContainer" containerID="ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.415934 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frzrb\" (UniqueName: \"kubernetes.io/projected/ba41dd5d-1717-4805-b69c-c20061e76b5c-kube-api-access-frzrb\") on node \"crc\" DevicePath \"\"" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.415971 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.415984 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba41dd5d-1717-4805-b69c-c20061e76b5c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.420114 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dszvd"] Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.443515 4863 scope.go:117] "RemoveContainer" containerID="cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.489326 4863 scope.go:117] "RemoveContainer" containerID="40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e" Dec 05 09:21:56 crc kubenswrapper[4863]: E1205 09:21:56.489730 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e\": container with ID starting with 40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e not found: ID does not exist" containerID="40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.489764 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e"} err="failed to get container status \"40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e\": rpc error: code = NotFound desc = could not find container \"40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e\": container with ID starting with 40c515ae8213e6bf1676bf519411023869932bbbf3e6285f4b2f9f200d11856e not found: ID does not exist" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.489787 4863 scope.go:117] "RemoveContainer" containerID="ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183" Dec 05 09:21:56 crc kubenswrapper[4863]: E1205 09:21:56.489990 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183\": container with ID starting with ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183 not found: ID does not exist" containerID="ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.490012 4863 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183"} err="failed to get container status \"ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183\": rpc error: code = NotFound desc = could not find container \"ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183\": container with ID starting with ae308b2e6d00042eb0e36b69a8d386060f6742549e9d53172a40a48656c63183 not found: ID does not exist" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.490025 4863 scope.go:117] "RemoveContainer" containerID="cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496" Dec 05 09:21:56 crc kubenswrapper[4863]: E1205 09:21:56.490194 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496\": container with ID starting with cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496 not found: ID does not exist" containerID="cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.490217 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496"} err="failed to get container status \"cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496\": rpc error: code = NotFound desc = could not find container \"cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496\": container with ID starting with cf7c4f49126a9538ab4cc4e56893254e8040e0f12105ed48b9a66525a70c1496 not found: ID does not exist" Dec 05 09:21:56 crc kubenswrapper[4863]: I1205 09:21:56.623958 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" path="/var/lib/kubelet/pods/ba41dd5d-1717-4805-b69c-c20061e76b5c/volumes" Dec 05 09:22:07 crc kubenswrapper[4863]: I1205 09:22:07.602076 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:22:07 crc kubenswrapper[4863]: E1205 09:22:07.604008 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:22:20 crc kubenswrapper[4863]: I1205 09:22:20.602274 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:22:20 crc kubenswrapper[4863]: E1205 09:22:20.603098 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:22:31 crc kubenswrapper[4863]: I1205 09:22:31.607822 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:22:31 crc 
kubenswrapper[4863]: E1205 09:22:31.608857 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:22:45 crc kubenswrapper[4863]: I1205 09:22:45.602644 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:22:45 crc kubenswrapper[4863]: E1205 09:22:45.603464 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:22:59 crc kubenswrapper[4863]: I1205 09:22:59.603265 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:22:59 crc kubenswrapper[4863]: E1205 09:22:59.604158 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:23:10 crc kubenswrapper[4863]: I1205 09:23:10.602161 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:23:10 crc kubenswrapper[4863]: E1205 09:23:10.602944 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:23:22 crc kubenswrapper[4863]: I1205 09:23:22.609160 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:23:22 crc kubenswrapper[4863]: E1205 09:23:22.609993 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:23:34 crc kubenswrapper[4863]: I1205 09:23:34.602497 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:23:34 crc kubenswrapper[4863]: E1205 09:23:34.603246 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:23:49 crc kubenswrapper[4863]: I1205 09:23:49.602256 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:23:50 crc kubenswrapper[4863]: I1205 09:23:50.590007 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"fa35828f060b9587aee6082e6860ebf01b2588f7123c52cd4516a0cba1d3833a"} Dec 05 09:24:48 crc kubenswrapper[4863]: I1205 09:24:48.934676 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2txjr"] Dec 05 09:24:48 crc kubenswrapper[4863]: E1205 09:24:48.935725 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="registry-server" Dec 05 09:24:48 crc kubenswrapper[4863]: I1205 09:24:48.935744 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="registry-server" Dec 05 09:24:48 crc kubenswrapper[4863]: E1205 09:24:48.935801 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="extract-content" Dec 05 09:24:48 crc kubenswrapper[4863]: I1205 09:24:48.935811 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="extract-content" Dec 05 09:24:48 crc kubenswrapper[4863]: E1205 09:24:48.935844 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="extract-utilities" Dec 05 09:24:48 crc kubenswrapper[4863]: I1205 09:24:48.935854 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="extract-utilities" Dec 05 09:24:48 crc kubenswrapper[4863]: I1205 09:24:48.936113 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba41dd5d-1717-4805-b69c-c20061e76b5c" containerName="registry-server" Dec 05 09:24:48 crc kubenswrapper[4863]: I1205 09:24:48.938674 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:48 crc kubenswrapper[4863]: I1205 09:24:48.951113 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2txjr"] Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.001275 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-catalog-content\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.001372 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddfjg\" (UniqueName: \"kubernetes.io/projected/8b727aeb-634e-443e-b21b-9e958259d8e5-kube-api-access-ddfjg\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.001494 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-utilities\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.103598 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-catalog-content\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.103674 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddfjg\" (UniqueName: \"kubernetes.io/projected/8b727aeb-634e-443e-b21b-9e958259d8e5-kube-api-access-ddfjg\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.103753 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-utilities\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.104343 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-utilities\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.104415 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-catalog-content\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.126858 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ddfjg\" (UniqueName: \"kubernetes.io/projected/8b727aeb-634e-443e-b21b-9e958259d8e5-kube-api-access-ddfjg\") pod \"redhat-operators-2txjr\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.264074 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:49 crc kubenswrapper[4863]: I1205 09:24:49.766641 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2txjr"] Dec 05 09:24:50 crc kubenswrapper[4863]: I1205 09:24:50.180005 4863 generic.go:334] "Generic (PLEG): container finished" podID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerID="43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927" exitCode=0 Dec 05 09:24:50 crc kubenswrapper[4863]: I1205 09:24:50.180318 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2txjr" event={"ID":"8b727aeb-634e-443e-b21b-9e958259d8e5","Type":"ContainerDied","Data":"43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927"} Dec 05 09:24:50 crc kubenswrapper[4863]: I1205 09:24:50.180348 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2txjr" event={"ID":"8b727aeb-634e-443e-b21b-9e958259d8e5","Type":"ContainerStarted","Data":"33d00537f00f4779ff12dacd6e83f9265ba76cd8715aac09be5f53701e8a50e3"} Dec 05 09:24:51 crc kubenswrapper[4863]: I1205 09:24:51.192091 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2txjr" event={"ID":"8b727aeb-634e-443e-b21b-9e958259d8e5","Type":"ContainerStarted","Data":"ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57"} Dec 05 09:24:53 crc kubenswrapper[4863]: I1205 09:24:53.238537 4863 generic.go:334] "Generic (PLEG): container finished" podID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerID="ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57" exitCode=0 Dec 05 09:24:53 crc kubenswrapper[4863]: I1205 09:24:53.238765 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2txjr" event={"ID":"8b727aeb-634e-443e-b21b-9e958259d8e5","Type":"ContainerDied","Data":"ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57"} Dec 05 09:24:54 crc kubenswrapper[4863]: I1205 09:24:54.250620 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2txjr" event={"ID":"8b727aeb-634e-443e-b21b-9e958259d8e5","Type":"ContainerStarted","Data":"6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef"} Dec 05 09:24:54 crc kubenswrapper[4863]: I1205 09:24:54.268250 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2txjr" podStartSLOduration=2.81360741 podStartE2EDuration="6.268231429s" podCreationTimestamp="2025-12-05 09:24:48 +0000 UTC" firstStartedPulling="2025-12-05 09:24:50.181545034 +0000 UTC m=+9517.907542074" lastFinishedPulling="2025-12-05 09:24:53.636169053 +0000 UTC m=+9521.362166093" observedRunningTime="2025-12-05 09:24:54.267732786 +0000 UTC m=+9521.993729836" watchObservedRunningTime="2025-12-05 09:24:54.268231429 +0000 UTC m=+9521.994228469" Dec 05 09:24:59 crc kubenswrapper[4863]: I1205 09:24:59.265073 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2txjr" Dec 
05 09:24:59 crc kubenswrapper[4863]: I1205 09:24:59.265699 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:59 crc kubenswrapper[4863]: I1205 09:24:59.319166 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:59 crc kubenswrapper[4863]: I1205 09:24:59.379289 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:24:59 crc kubenswrapper[4863]: I1205 09:24:59.558700 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2txjr"] Dec 05 09:25:01 crc kubenswrapper[4863]: I1205 09:25:01.319851 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2txjr" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="registry-server" containerID="cri-o://6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef" gracePeriod=2 Dec 05 09:25:01 crc kubenswrapper[4863]: I1205 09:25:01.888159 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.027026 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-utilities\") pod \"8b727aeb-634e-443e-b21b-9e958259d8e5\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.027381 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-utilities" (OuterVolumeSpecName: "utilities") pod "8b727aeb-634e-443e-b21b-9e958259d8e5" (UID: "8b727aeb-634e-443e-b21b-9e958259d8e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.027652 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-catalog-content\") pod \"8b727aeb-634e-443e-b21b-9e958259d8e5\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.027902 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddfjg\" (UniqueName: \"kubernetes.io/projected/8b727aeb-634e-443e-b21b-9e958259d8e5-kube-api-access-ddfjg\") pod \"8b727aeb-634e-443e-b21b-9e958259d8e5\" (UID: \"8b727aeb-634e-443e-b21b-9e958259d8e5\") " Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.028819 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.034497 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b727aeb-634e-443e-b21b-9e958259d8e5-kube-api-access-ddfjg" (OuterVolumeSpecName: "kube-api-access-ddfjg") pod "8b727aeb-634e-443e-b21b-9e958259d8e5" (UID: "8b727aeb-634e-443e-b21b-9e958259d8e5"). InnerVolumeSpecName "kube-api-access-ddfjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.131821 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddfjg\" (UniqueName: \"kubernetes.io/projected/8b727aeb-634e-443e-b21b-9e958259d8e5-kube-api-access-ddfjg\") on node \"crc\" DevicePath \"\"" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.151283 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b727aeb-634e-443e-b21b-9e958259d8e5" (UID: "8b727aeb-634e-443e-b21b-9e958259d8e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.234178 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b727aeb-634e-443e-b21b-9e958259d8e5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.332604 4863 generic.go:334] "Generic (PLEG): container finished" podID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerID="6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef" exitCode=0 Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.332651 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2txjr" event={"ID":"8b727aeb-634e-443e-b21b-9e958259d8e5","Type":"ContainerDied","Data":"6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef"} Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.332686 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2txjr" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.332709 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2txjr" event={"ID":"8b727aeb-634e-443e-b21b-9e958259d8e5","Type":"ContainerDied","Data":"33d00537f00f4779ff12dacd6e83f9265ba76cd8715aac09be5f53701e8a50e3"} Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.332742 4863 scope.go:117] "RemoveContainer" containerID="6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.357174 4863 scope.go:117] "RemoveContainer" containerID="ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.383582 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2txjr"] Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.396796 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2txjr"] Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.397619 4863 scope.go:117] "RemoveContainer" containerID="43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.437874 4863 scope.go:117] "RemoveContainer" containerID="6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef" Dec 05 09:25:02 crc kubenswrapper[4863]: E1205 09:25:02.438319 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef\": container with ID starting with 6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef 
not found: ID does not exist" containerID="6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.438369 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef"} err="failed to get container status \"6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef\": rpc error: code = NotFound desc = could not find container \"6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef\": container with ID starting with 6fadce28b4d9f99dd307b8078bd16af6442984de603f91095fbc3b858a3756ef not found: ID does not exist" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.438400 4863 scope.go:117] "RemoveContainer" containerID="ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57" Dec 05 09:25:02 crc kubenswrapper[4863]: E1205 09:25:02.438815 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57\": container with ID starting with ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57 not found: ID does not exist" containerID="ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.438847 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57"} err="failed to get container status \"ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57\": rpc error: code = NotFound desc = could not find container \"ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57\": container with ID starting with ea5218d867b30595df106382f0c58d81a37f0b1136e0cb10428957380fa1eb57 not found: ID does not exist" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.438867 4863 scope.go:117] "RemoveContainer" containerID="43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927" Dec 05 09:25:02 crc kubenswrapper[4863]: E1205 09:25:02.439236 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927\": container with ID starting with 43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927 not found: ID does not exist" containerID="43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.439277 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927"} err="failed to get container status \"43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927\": rpc error: code = NotFound desc = could not find container \"43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927\": container with ID starting with 43ea29066f689d87837152fb2d96f37b09fd8f78ab56ea0164a5dfccd5234927 not found: ID does not exist" Dec 05 09:25:02 crc kubenswrapper[4863]: I1205 09:25:02.616005 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" path="/var/lib/kubelet/pods/8b727aeb-634e-443e-b21b-9e958259d8e5/volumes" Dec 05 09:25:31 crc kubenswrapper[4863]: I1205 09:25:31.743772 4863 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-bq7lw" podUID="d7017ca8-d0ed-4245-8786-be169d9dde3a" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.79:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 09:25:32 crc kubenswrapper[4863]: I1205 09:25:32.088722 4863 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kk8c5" podUID="b850a070-2bf4-4163-9e18-0315e1f0b250" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.83:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 09:26:08 crc kubenswrapper[4863]: I1205 09:26:08.463754 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:26:08 crc kubenswrapper[4863]: I1205 09:26:08.464257 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:26:38 crc kubenswrapper[4863]: I1205 09:26:38.464143 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:26:38 crc kubenswrapper[4863]: I1205 09:26:38.464681 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:27:08 crc kubenswrapper[4863]: I1205 09:27:08.464263 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:27:08 crc kubenswrapper[4863]: I1205 09:27:08.465010 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:27:08 crc kubenswrapper[4863]: I1205 09:27:08.465069 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 09:27:08 crc kubenswrapper[4863]: I1205 09:27:08.466207 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fa35828f060b9587aee6082e6860ebf01b2588f7123c52cd4516a0cba1d3833a"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Dec 05 09:27:08 crc kubenswrapper[4863]: I1205 09:27:08.466288 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://fa35828f060b9587aee6082e6860ebf01b2588f7123c52cd4516a0cba1d3833a" gracePeriod=600 Dec 05 09:27:09 crc kubenswrapper[4863]: I1205 09:27:09.547980 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="fa35828f060b9587aee6082e6860ebf01b2588f7123c52cd4516a0cba1d3833a" exitCode=0 Dec 05 09:27:09 crc kubenswrapper[4863]: I1205 09:27:09.548058 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"fa35828f060b9587aee6082e6860ebf01b2588f7123c52cd4516a0cba1d3833a"} Dec 05 09:27:09 crc kubenswrapper[4863]: I1205 09:27:09.548628 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503"} Dec 05 09:27:09 crc kubenswrapper[4863]: I1205 09:27:09.548653 4863 scope.go:117] "RemoveContainer" containerID="307890de344810d052ab874ad10579f59afa882bcf946a8a6ea41e7661fe6032" Dec 05 09:27:10 crc kubenswrapper[4863]: I1205 09:27:10.559851 4863 generic.go:334] "Generic (PLEG): container finished" podID="fefafcc8-4071-43dd-8d8e-c40f20901efc" containerID="b2e70e7c5a40bbb0274557bd4dc1377938b34408384ad71aabba149eb0a4489c" exitCode=0 Dec 05 09:27:10 crc kubenswrapper[4863]: I1205 09:27:10.559939 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"fefafcc8-4071-43dd-8d8e-c40f20901efc","Type":"ContainerDied","Data":"b2e70e7c5a40bbb0274557bd4dc1377938b34408384ad71aabba149eb0a4489c"} Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.584374 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"fefafcc8-4071-43dd-8d8e-c40f20901efc","Type":"ContainerDied","Data":"94e27a55d7933b0d7e596b281cbcddba53f29c0223548afd32084a7fae6a0486"} Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.585135 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94e27a55d7933b0d7e596b281cbcddba53f29c0223548afd32084a7fae6a0486" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.636118 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.827307 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ssh-key\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.827395 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2njt5\" (UniqueName: \"kubernetes.io/projected/fefafcc8-4071-43dd-8d8e-c40f20901efc-kube-api-access-2njt5\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.827539 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-workdir\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.827669 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.827720 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ca-certs\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.827849 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-config-data\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.828030 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-temporary\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.828143 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config-secret\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.828230 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"fefafcc8-4071-43dd-8d8e-c40f20901efc\" (UID: \"fefafcc8-4071-43dd-8d8e-c40f20901efc\") " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.828981 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-config-data" (OuterVolumeSpecName: "config-data") pod 
"fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.829292 4863 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.830298 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.835466 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fefafcc8-4071-43dd-8d8e-c40f20901efc-kube-api-access-2njt5" (OuterVolumeSpecName: "kube-api-access-2njt5") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "kube-api-access-2njt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.836750 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.838175 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.865021 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.869089 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.872566 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). 
InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.887158 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "fefafcc8-4071-43dd-8d8e-c40f20901efc" (UID: "fefafcc8-4071-43dd-8d8e-c40f20901efc"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932373 4863 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932451 4863 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932516 4863 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932530 4863 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932541 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2njt5\" (UniqueName: \"kubernetes.io/projected/fefafcc8-4071-43dd-8d8e-c40f20901efc-kube-api-access-2njt5\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932551 4863 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/fefafcc8-4071-43dd-8d8e-c40f20901efc-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932566 4863 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fefafcc8-4071-43dd-8d8e-c40f20901efc-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.932579 4863 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/fefafcc8-4071-43dd-8d8e-c40f20901efc-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:12 crc kubenswrapper[4863]: I1205 09:27:12.958525 4863 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 09:27:13 crc kubenswrapper[4863]: I1205 09:27:13.034529 4863 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:13 crc kubenswrapper[4863]: I1205 09:27:13.595893 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.536976 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9w7bc"] Dec 05 09:27:17 crc kubenswrapper[4863]: E1205 09:27:17.537980 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="extract-content" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.537995 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="extract-content" Dec 05 09:27:17 crc kubenswrapper[4863]: E1205 09:27:17.538015 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fefafcc8-4071-43dd-8d8e-c40f20901efc" containerName="tempest-tests-tempest-tests-runner" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.538021 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fefafcc8-4071-43dd-8d8e-c40f20901efc" containerName="tempest-tests-tempest-tests-runner" Dec 05 09:27:17 crc kubenswrapper[4863]: E1205 09:27:17.538040 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="extract-utilities" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.538048 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="extract-utilities" Dec 05 09:27:17 crc kubenswrapper[4863]: E1205 09:27:17.538066 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="registry-server" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.538072 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="registry-server" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.538288 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fefafcc8-4071-43dd-8d8e-c40f20901efc" containerName="tempest-tests-tempest-tests-runner" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.538304 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b727aeb-634e-443e-b21b-9e958259d8e5" containerName="registry-server" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.541649 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.555410 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9w7bc"] Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.654020 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-catalog-content\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.654208 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjhd5\" (UniqueName: \"kubernetes.io/projected/41d7523d-db90-4311-9e3d-e675cbdd7ec0-kube-api-access-jjhd5\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.654284 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-utilities\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.755596 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjhd5\" (UniqueName: \"kubernetes.io/projected/41d7523d-db90-4311-9e3d-e675cbdd7ec0-kube-api-access-jjhd5\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.756073 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-utilities\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.756153 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-catalog-content\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.756948 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-utilities\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.756988 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-catalog-content\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.785308 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jjhd5\" (UniqueName: \"kubernetes.io/projected/41d7523d-db90-4311-9e3d-e675cbdd7ec0-kube-api-access-jjhd5\") pod \"community-operators-9w7bc\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:17 crc kubenswrapper[4863]: I1205 09:27:17.898624 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:18 crc kubenswrapper[4863]: I1205 09:27:18.402925 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9w7bc"] Dec 05 09:27:18 crc kubenswrapper[4863]: I1205 09:27:18.648034 4863 generic.go:334] "Generic (PLEG): container finished" podID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerID="ddbdd8b1c78ff32060aee3b484836bdbf6fa7ab409bf0ae0078b39a2ac3022ae" exitCode=0 Dec 05 09:27:18 crc kubenswrapper[4863]: I1205 09:27:18.648239 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9w7bc" event={"ID":"41d7523d-db90-4311-9e3d-e675cbdd7ec0","Type":"ContainerDied","Data":"ddbdd8b1c78ff32060aee3b484836bdbf6fa7ab409bf0ae0078b39a2ac3022ae"} Dec 05 09:27:18 crc kubenswrapper[4863]: I1205 09:27:18.648464 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9w7bc" event={"ID":"41d7523d-db90-4311-9e3d-e675cbdd7ec0","Type":"ContainerStarted","Data":"cd6067a82f6291ca1857c68587358bf2932d32e953293e07dc2af016238246c6"} Dec 05 09:27:18 crc kubenswrapper[4863]: I1205 09:27:18.649863 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:27:20 crc kubenswrapper[4863]: I1205 09:27:20.669198 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9w7bc" event={"ID":"41d7523d-db90-4311-9e3d-e675cbdd7ec0","Type":"ContainerStarted","Data":"a91505c6d1b9fcf138d4bb648f83f702518ae62b39a74e0ea9456ab4a920ce70"} Dec 05 09:27:21 crc kubenswrapper[4863]: I1205 09:27:21.682328 4863 generic.go:334] "Generic (PLEG): container finished" podID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerID="a91505c6d1b9fcf138d4bb648f83f702518ae62b39a74e0ea9456ab4a920ce70" exitCode=0 Dec 05 09:27:21 crc kubenswrapper[4863]: I1205 09:27:21.682391 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9w7bc" event={"ID":"41d7523d-db90-4311-9e3d-e675cbdd7ec0","Type":"ContainerDied","Data":"a91505c6d1b9fcf138d4bb648f83f702518ae62b39a74e0ea9456ab4a920ce70"} Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.532401 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.534750 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.537324 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jh4nh" Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.546903 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.652271 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q52xc\" (UniqueName: \"kubernetes.io/projected/00325ca5-c753-4d51-9d93-45f8d36f8535-kube-api-access-q52xc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"00325ca5-c753-4d51-9d93-45f8d36f8535\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.652692 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"00325ca5-c753-4d51-9d93-45f8d36f8535\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.696524 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9w7bc" event={"ID":"41d7523d-db90-4311-9e3d-e675cbdd7ec0","Type":"ContainerStarted","Data":"dba8af2398f3f39b0d1b816c9370aa9c4ece4e7ed4f2e7cb66b7d335c59b68a3"} Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.727683 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9w7bc" podStartSLOduration=2.282932696 podStartE2EDuration="5.727664052s" podCreationTimestamp="2025-12-05 09:27:17 +0000 UTC" firstStartedPulling="2025-12-05 09:27:18.649685121 +0000 UTC m=+9666.375682161" lastFinishedPulling="2025-12-05 09:27:22.094416477 +0000 UTC m=+9669.820413517" observedRunningTime="2025-12-05 09:27:22.717573446 +0000 UTC m=+9670.443570496" watchObservedRunningTime="2025-12-05 09:27:22.727664052 +0000 UTC m=+9670.453661092" Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.754205 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q52xc\" (UniqueName: \"kubernetes.io/projected/00325ca5-c753-4d51-9d93-45f8d36f8535-kube-api-access-q52xc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"00325ca5-c753-4d51-9d93-45f8d36f8535\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.754399 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"00325ca5-c753-4d51-9d93-45f8d36f8535\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:22 crc kubenswrapper[4863]: I1205 09:27:22.754754 4863 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"00325ca5-c753-4d51-9d93-45f8d36f8535\") device mount path 
\"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.280330 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q52xc\" (UniqueName: \"kubernetes.io/projected/00325ca5-c753-4d51-9d93-45f8d36f8535-kube-api-access-q52xc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"00325ca5-c753-4d51-9d93-45f8d36f8535\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.316037 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"00325ca5-c753-4d51-9d93-45f8d36f8535\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.454146 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.748955 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-spdgb"] Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.751569 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.762310 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spdgb"] Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.873968 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-catalog-content\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.874038 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-utilities\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.874251 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv68g\" (UniqueName: \"kubernetes.io/projected/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-kube-api-access-zv68g\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.976267 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv68g\" (UniqueName: \"kubernetes.io/projected/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-kube-api-access-zv68g\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.976388 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-catalog-content\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.976451 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-utilities\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.976936 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-utilities\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.977030 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-catalog-content\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:23 crc kubenswrapper[4863]: I1205 09:27:23.982459 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 09:27:24 crc kubenswrapper[4863]: I1205 09:27:24.000420 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv68g\" (UniqueName: \"kubernetes.io/projected/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-kube-api-access-zv68g\") pod \"certified-operators-spdgb\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:24 crc kubenswrapper[4863]: I1205 09:27:24.084638 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:24 crc kubenswrapper[4863]: W1205 09:27:24.602079 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc895e13e_52de_4a11_9f37_c8e4bdb73ed0.slice/crio-c4ce7c654bb4726351b6f91ca036ec8ca309c33b78b1d7f5ab3cc0ef58dc471c WatchSource:0}: Error finding container c4ce7c654bb4726351b6f91ca036ec8ca309c33b78b1d7f5ab3cc0ef58dc471c: Status 404 returned error can't find the container with id c4ce7c654bb4726351b6f91ca036ec8ca309c33b78b1d7f5ab3cc0ef58dc471c Dec 05 09:27:24 crc kubenswrapper[4863]: I1205 09:27:24.614721 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spdgb"] Dec 05 09:27:24 crc kubenswrapper[4863]: I1205 09:27:24.719533 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdgb" event={"ID":"c895e13e-52de-4a11-9f37-c8e4bdb73ed0","Type":"ContainerStarted","Data":"c4ce7c654bb4726351b6f91ca036ec8ca309c33b78b1d7f5ab3cc0ef58dc471c"} Dec 05 09:27:24 crc kubenswrapper[4863]: I1205 09:27:24.720844 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"00325ca5-c753-4d51-9d93-45f8d36f8535","Type":"ContainerStarted","Data":"afe82dcfc26608bd7dde0a6650f96a3bfb92843ae5a50d597aafeb72b371b890"} Dec 05 09:27:25 crc kubenswrapper[4863]: I1205 09:27:25.731698 4863 generic.go:334] "Generic (PLEG): container finished" podID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerID="bb7edabd0b203020a324d83ed150dcc599f907cbd6d1f1ddc0f78cdc6fae1a18" exitCode=0 Dec 05 09:27:25 crc kubenswrapper[4863]: I1205 09:27:25.731772 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdgb" event={"ID":"c895e13e-52de-4a11-9f37-c8e4bdb73ed0","Type":"ContainerDied","Data":"bb7edabd0b203020a324d83ed150dcc599f907cbd6d1f1ddc0f78cdc6fae1a18"} Dec 05 09:27:26 crc kubenswrapper[4863]: I1205 09:27:26.756864 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"00325ca5-c753-4d51-9d93-45f8d36f8535","Type":"ContainerStarted","Data":"418b91b91d3a4b81d16d21dcfd19efa01ad1a7d02bac194cd02cb229c7e1415d"} Dec 05 09:27:26 crc kubenswrapper[4863]: I1205 09:27:26.760112 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdgb" event={"ID":"c895e13e-52de-4a11-9f37-c8e4bdb73ed0","Type":"ContainerStarted","Data":"ae2ce200225289aeefd86d477259cab2f78e42647250d2eb1ef8ec019f5a3c2a"} Dec 05 09:27:26 crc kubenswrapper[4863]: I1205 09:27:26.783826 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=3.165233274 podStartE2EDuration="4.783807202s" podCreationTimestamp="2025-12-05 09:27:22 +0000 UTC" firstStartedPulling="2025-12-05 09:27:23.985232324 +0000 UTC m=+9671.711229364" lastFinishedPulling="2025-12-05 09:27:25.603806262 +0000 UTC m=+9673.329803292" observedRunningTime="2025-12-05 09:27:26.770386975 +0000 UTC m=+9674.496384035" watchObservedRunningTime="2025-12-05 09:27:26.783807202 +0000 UTC m=+9674.509804242" Dec 05 09:27:27 crc kubenswrapper[4863]: I1205 09:27:27.771296 4863 generic.go:334] "Generic (PLEG): container finished" podID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" 
containerID="ae2ce200225289aeefd86d477259cab2f78e42647250d2eb1ef8ec019f5a3c2a" exitCode=0 Dec 05 09:27:27 crc kubenswrapper[4863]: I1205 09:27:27.771418 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdgb" event={"ID":"c895e13e-52de-4a11-9f37-c8e4bdb73ed0","Type":"ContainerDied","Data":"ae2ce200225289aeefd86d477259cab2f78e42647250d2eb1ef8ec019f5a3c2a"} Dec 05 09:27:27 crc kubenswrapper[4863]: I1205 09:27:27.899401 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:27 crc kubenswrapper[4863]: I1205 09:27:27.899458 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:27 crc kubenswrapper[4863]: I1205 09:27:27.953080 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:28 crc kubenswrapper[4863]: I1205 09:27:28.783346 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdgb" event={"ID":"c895e13e-52de-4a11-9f37-c8e4bdb73ed0","Type":"ContainerStarted","Data":"9152899a4ab8cefe097b448d78970426b9395f955197f4b47e48dfc226d4186e"} Dec 05 09:27:28 crc kubenswrapper[4863]: I1205 09:27:28.806188 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-spdgb" podStartSLOduration=3.197416379 podStartE2EDuration="5.806169908s" podCreationTimestamp="2025-12-05 09:27:23 +0000 UTC" firstStartedPulling="2025-12-05 09:27:25.73537587 +0000 UTC m=+9673.461372910" lastFinishedPulling="2025-12-05 09:27:28.344129389 +0000 UTC m=+9676.070126439" observedRunningTime="2025-12-05 09:27:28.800815177 +0000 UTC m=+9676.526812237" watchObservedRunningTime="2025-12-05 09:27:28.806169908 +0000 UTC m=+9676.532166948" Dec 05 09:27:28 crc kubenswrapper[4863]: I1205 09:27:28.833601 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:30 crc kubenswrapper[4863]: I1205 09:27:30.331112 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9w7bc"] Dec 05 09:27:30 crc kubenswrapper[4863]: I1205 09:27:30.905316 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9w7bc" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="registry-server" containerID="cri-o://dba8af2398f3f39b0d1b816c9370aa9c4ece4e7ed4f2e7cb66b7d335c59b68a3" gracePeriod=2 Dec 05 09:27:31 crc kubenswrapper[4863]: I1205 09:27:31.919408 4863 generic.go:334] "Generic (PLEG): container finished" podID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerID="dba8af2398f3f39b0d1b816c9370aa9c4ece4e7ed4f2e7cb66b7d335c59b68a3" exitCode=0 Dec 05 09:27:31 crc kubenswrapper[4863]: I1205 09:27:31.919563 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9w7bc" event={"ID":"41d7523d-db90-4311-9e3d-e675cbdd7ec0","Type":"ContainerDied","Data":"dba8af2398f3f39b0d1b816c9370aa9c4ece4e7ed4f2e7cb66b7d335c59b68a3"} Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.074933 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.104722 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-catalog-content\") pod \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.104761 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-utilities\") pod \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.105033 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjhd5\" (UniqueName: \"kubernetes.io/projected/41d7523d-db90-4311-9e3d-e675cbdd7ec0-kube-api-access-jjhd5\") pod \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\" (UID: \"41d7523d-db90-4311-9e3d-e675cbdd7ec0\") " Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.105890 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-utilities" (OuterVolumeSpecName: "utilities") pod "41d7523d-db90-4311-9e3d-e675cbdd7ec0" (UID: "41d7523d-db90-4311-9e3d-e675cbdd7ec0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.112826 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41d7523d-db90-4311-9e3d-e675cbdd7ec0-kube-api-access-jjhd5" (OuterVolumeSpecName: "kube-api-access-jjhd5") pod "41d7523d-db90-4311-9e3d-e675cbdd7ec0" (UID: "41d7523d-db90-4311-9e3d-e675cbdd7ec0"). InnerVolumeSpecName "kube-api-access-jjhd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.170154 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41d7523d-db90-4311-9e3d-e675cbdd7ec0" (UID: "41d7523d-db90-4311-9e3d-e675cbdd7ec0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.207182 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjhd5\" (UniqueName: \"kubernetes.io/projected/41d7523d-db90-4311-9e3d-e675cbdd7ec0-kube-api-access-jjhd5\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.207215 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.207224 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41d7523d-db90-4311-9e3d-e675cbdd7ec0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.930788 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9w7bc" event={"ID":"41d7523d-db90-4311-9e3d-e675cbdd7ec0","Type":"ContainerDied","Data":"cd6067a82f6291ca1857c68587358bf2932d32e953293e07dc2af016238246c6"} Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.931152 4863 scope.go:117] "RemoveContainer" containerID="dba8af2398f3f39b0d1b816c9370aa9c4ece4e7ed4f2e7cb66b7d335c59b68a3" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.930874 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9w7bc" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.961015 4863 scope.go:117] "RemoveContainer" containerID="a91505c6d1b9fcf138d4bb648f83f702518ae62b39a74e0ea9456ab4a920ce70" Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.966959 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9w7bc"] Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.978748 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9w7bc"] Dec 05 09:27:32 crc kubenswrapper[4863]: I1205 09:27:32.982855 4863 scope.go:117] "RemoveContainer" containerID="ddbdd8b1c78ff32060aee3b484836bdbf6fa7ab409bf0ae0078b39a2ac3022ae" Dec 05 09:27:34 crc kubenswrapper[4863]: I1205 09:27:34.085862 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:34 crc kubenswrapper[4863]: I1205 09:27:34.086159 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:34 crc kubenswrapper[4863]: I1205 09:27:34.134182 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:34 crc kubenswrapper[4863]: I1205 09:27:34.614289 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" path="/var/lib/kubelet/pods/41d7523d-db90-4311-9e3d-e675cbdd7ec0/volumes" Dec 05 09:27:35 crc kubenswrapper[4863]: I1205 09:27:35.002372 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:35 crc kubenswrapper[4863]: I1205 09:27:35.313783 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-spdgb"] Dec 05 09:27:36 crc kubenswrapper[4863]: I1205 09:27:36.969654 4863 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/certified-operators-spdgb" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="registry-server" containerID="cri-o://9152899a4ab8cefe097b448d78970426b9395f955197f4b47e48dfc226d4186e" gracePeriod=2 Dec 05 09:27:37 crc kubenswrapper[4863]: I1205 09:27:37.985182 4863 generic.go:334] "Generic (PLEG): container finished" podID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerID="9152899a4ab8cefe097b448d78970426b9395f955197f4b47e48dfc226d4186e" exitCode=0 Dec 05 09:27:37 crc kubenswrapper[4863]: I1205 09:27:37.985270 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdgb" event={"ID":"c895e13e-52de-4a11-9f37-c8e4bdb73ed0","Type":"ContainerDied","Data":"9152899a4ab8cefe097b448d78970426b9395f955197f4b47e48dfc226d4186e"} Dec 05 09:27:37 crc kubenswrapper[4863]: I1205 09:27:37.985636 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spdgb" event={"ID":"c895e13e-52de-4a11-9f37-c8e4bdb73ed0","Type":"ContainerDied","Data":"c4ce7c654bb4726351b6f91ca036ec8ca309c33b78b1d7f5ab3cc0ef58dc471c"} Dec 05 09:27:37 crc kubenswrapper[4863]: I1205 09:27:37.985654 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4ce7c654bb4726351b6f91ca036ec8ca309c33b78b1d7f5ab3cc0ef58dc471c" Dec 05 09:27:37 crc kubenswrapper[4863]: I1205 09:27:37.987946 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.012957 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-utilities\") pod \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.013338 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-catalog-content\") pod \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.013542 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv68g\" (UniqueName: \"kubernetes.io/projected/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-kube-api-access-zv68g\") pod \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\" (UID: \"c895e13e-52de-4a11-9f37-c8e4bdb73ed0\") " Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.014807 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-utilities" (OuterVolumeSpecName: "utilities") pod "c895e13e-52de-4a11-9f37-c8e4bdb73ed0" (UID: "c895e13e-52de-4a11-9f37-c8e4bdb73ed0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.015084 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.030237 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-kube-api-access-zv68g" (OuterVolumeSpecName: "kube-api-access-zv68g") pod "c895e13e-52de-4a11-9f37-c8e4bdb73ed0" (UID: "c895e13e-52de-4a11-9f37-c8e4bdb73ed0"). InnerVolumeSpecName "kube-api-access-zv68g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.067059 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c895e13e-52de-4a11-9f37-c8e4bdb73ed0" (UID: "c895e13e-52de-4a11-9f37-c8e4bdb73ed0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.116669 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.116715 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv68g\" (UniqueName: \"kubernetes.io/projected/c895e13e-52de-4a11-9f37-c8e4bdb73ed0-kube-api-access-zv68g\") on node \"crc\" DevicePath \"\"" Dec 05 09:27:38 crc kubenswrapper[4863]: I1205 09:27:38.994786 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-spdgb" Dec 05 09:27:39 crc kubenswrapper[4863]: I1205 09:27:39.023245 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-spdgb"] Dec 05 09:27:39 crc kubenswrapper[4863]: I1205 09:27:39.033003 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-spdgb"] Dec 05 09:27:40 crc kubenswrapper[4863]: I1205 09:27:40.613803 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" path="/var/lib/kubelet/pods/c895e13e-52de-4a11-9f37-c8e4bdb73ed0/volumes" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.155102 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8jrh/must-gather-q4dv8"] Dec 05 09:28:34 crc kubenswrapper[4863]: E1205 09:28:34.156162 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="registry-server" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156178 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="registry-server" Dec 05 09:28:34 crc kubenswrapper[4863]: E1205 09:28:34.156198 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="extract-content" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156205 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="extract-content" Dec 05 09:28:34 crc kubenswrapper[4863]: E1205 09:28:34.156231 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="extract-utilities" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156243 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="extract-utilities" Dec 05 09:28:34 crc kubenswrapper[4863]: E1205 09:28:34.156283 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="extract-content" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156290 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="extract-content" Dec 05 09:28:34 crc kubenswrapper[4863]: E1205 09:28:34.156307 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="registry-server" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156313 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="registry-server" Dec 05 09:28:34 crc kubenswrapper[4863]: E1205 09:28:34.156334 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="extract-utilities" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156341 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="extract-utilities" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156612 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="c895e13e-52de-4a11-9f37-c8e4bdb73ed0" containerName="registry-server" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.156634 4863 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="41d7523d-db90-4311-9e3d-e675cbdd7ec0" containerName="registry-server" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.158919 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.162896 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-b8jrh"/"kube-root-ca.crt" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.176518 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-b8jrh"/"default-dockercfg-wt2p6" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.176823 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-b8jrh"/"openshift-service-ca.crt" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.186173 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-b8jrh/must-gather-q4dv8"] Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.229122 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b5k8\" (UniqueName: \"kubernetes.io/projected/43d554cd-5fca-4add-804b-b86db0db2896-kube-api-access-8b5k8\") pod \"must-gather-q4dv8\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.229517 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/43d554cd-5fca-4add-804b-b86db0db2896-must-gather-output\") pod \"must-gather-q4dv8\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.331680 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/43d554cd-5fca-4add-804b-b86db0db2896-must-gather-output\") pod \"must-gather-q4dv8\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.331782 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b5k8\" (UniqueName: \"kubernetes.io/projected/43d554cd-5fca-4add-804b-b86db0db2896-kube-api-access-8b5k8\") pod \"must-gather-q4dv8\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.332198 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/43d554cd-5fca-4add-804b-b86db0db2896-must-gather-output\") pod \"must-gather-q4dv8\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.355434 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b5k8\" (UniqueName: \"kubernetes.io/projected/43d554cd-5fca-4add-804b-b86db0db2896-kube-api-access-8b5k8\") pod \"must-gather-q4dv8\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:34 crc kubenswrapper[4863]: I1205 09:28:34.477566 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:28:35 crc kubenswrapper[4863]: I1205 09:28:35.026313 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-b8jrh/must-gather-q4dv8"] Dec 05 09:28:35 crc kubenswrapper[4863]: W1205 09:28:35.038902 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43d554cd_5fca_4add_804b_b86db0db2896.slice/crio-0a108e45f6245e3efc507d97b125ca3f4e47277e6b1cb454288c33abbbb91b44 WatchSource:0}: Error finding container 0a108e45f6245e3efc507d97b125ca3f4e47277e6b1cb454288c33abbbb91b44: Status 404 returned error can't find the container with id 0a108e45f6245e3efc507d97b125ca3f4e47277e6b1cb454288c33abbbb91b44 Dec 05 09:28:35 crc kubenswrapper[4863]: I1205 09:28:35.908519 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" event={"ID":"43d554cd-5fca-4add-804b-b86db0db2896","Type":"ContainerStarted","Data":"0a108e45f6245e3efc507d97b125ca3f4e47277e6b1cb454288c33abbbb91b44"} Dec 05 09:28:40 crc kubenswrapper[4863]: I1205 09:28:40.972577 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" event={"ID":"43d554cd-5fca-4add-804b-b86db0db2896","Type":"ContainerStarted","Data":"56299d7c00b8a920b410c29d459310ce93e37006e4cfe4d8b1a092a0ad6c303e"} Dec 05 09:28:41 crc kubenswrapper[4863]: I1205 09:28:41.982779 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" event={"ID":"43d554cd-5fca-4add-804b-b86db0db2896","Type":"ContainerStarted","Data":"66c36a39e8d024ca07460b431513f9255324f9ea84a0d180c09b5a343cae355e"} Dec 05 09:28:42 crc kubenswrapper[4863]: I1205 09:28:42.009240 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" podStartSLOduration=2.8343340169999998 podStartE2EDuration="8.009217883s" podCreationTimestamp="2025-12-05 09:28:34 +0000 UTC" firstStartedPulling="2025-12-05 09:28:35.040883995 +0000 UTC m=+9742.766881035" lastFinishedPulling="2025-12-05 09:28:40.215767861 +0000 UTC m=+9747.941764901" observedRunningTime="2025-12-05 09:28:41.99511575 +0000 UTC m=+9749.721112820" watchObservedRunningTime="2025-12-05 09:28:42.009217883 +0000 UTC m=+9749.735214923" Dec 05 09:28:44 crc kubenswrapper[4863]: E1205 09:28:44.231412 4863 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.106:56938->38.102.83.106:33381: read tcp 38.102.83.106:56938->38.102.83.106:33381: read: connection reset by peer Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.030282 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-9ntkz"] Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.032039 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.081105 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48c7c53d-f823-4f90-9db5-8463b8e33eb1-host\") pod \"crc-debug-9ntkz\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.081247 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvqlc\" (UniqueName: \"kubernetes.io/projected/48c7c53d-f823-4f90-9db5-8463b8e33eb1-kube-api-access-mvqlc\") pod \"crc-debug-9ntkz\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.182644 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvqlc\" (UniqueName: \"kubernetes.io/projected/48c7c53d-f823-4f90-9db5-8463b8e33eb1-kube-api-access-mvqlc\") pod \"crc-debug-9ntkz\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.183110 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48c7c53d-f823-4f90-9db5-8463b8e33eb1-host\") pod \"crc-debug-9ntkz\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.183182 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48c7c53d-f823-4f90-9db5-8463b8e33eb1-host\") pod \"crc-debug-9ntkz\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.205758 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvqlc\" (UniqueName: \"kubernetes.io/projected/48c7c53d-f823-4f90-9db5-8463b8e33eb1-kube-api-access-mvqlc\") pod \"crc-debug-9ntkz\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:45 crc kubenswrapper[4863]: I1205 09:28:45.357821 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:28:46 crc kubenswrapper[4863]: I1205 09:28:46.055535 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" event={"ID":"48c7c53d-f823-4f90-9db5-8463b8e33eb1","Type":"ContainerStarted","Data":"f1f719c5a962a123aaaea6e8dd7bc1a1821502c62a98b095126cd9bdfc4fa9d4"} Dec 05 09:29:00 crc kubenswrapper[4863]: I1205 09:29:00.210000 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" event={"ID":"48c7c53d-f823-4f90-9db5-8463b8e33eb1","Type":"ContainerStarted","Data":"c6db19903808b928ba2f2d3fc0ba4f8108585a0550769184c81f68e28a16a1fa"} Dec 05 09:29:00 crc kubenswrapper[4863]: I1205 09:29:00.229694 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" podStartSLOduration=1.267027892 podStartE2EDuration="15.229650111s" podCreationTimestamp="2025-12-05 09:28:45 +0000 UTC" firstStartedPulling="2025-12-05 09:28:45.391692341 +0000 UTC m=+9753.117689381" lastFinishedPulling="2025-12-05 09:28:59.35431456 +0000 UTC m=+9767.080311600" observedRunningTime="2025-12-05 09:29:00.226794351 +0000 UTC m=+9767.952791401" watchObservedRunningTime="2025-12-05 09:29:00.229650111 +0000 UTC m=+9767.955647161" Dec 05 09:29:08 crc kubenswrapper[4863]: I1205 09:29:08.464210 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:29:08 crc kubenswrapper[4863]: I1205 09:29:08.464874 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:29:38 crc kubenswrapper[4863]: I1205 09:29:38.464002 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:29:38 crc kubenswrapper[4863]: I1205 09:29:38.464590 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:29:48 crc kubenswrapper[4863]: I1205 09:29:48.709893 4863 generic.go:334] "Generic (PLEG): container finished" podID="48c7c53d-f823-4f90-9db5-8463b8e33eb1" containerID="c6db19903808b928ba2f2d3fc0ba4f8108585a0550769184c81f68e28a16a1fa" exitCode=0 Dec 05 09:29:48 crc kubenswrapper[4863]: I1205 09:29:48.710694 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" event={"ID":"48c7c53d-f823-4f90-9db5-8463b8e33eb1","Type":"ContainerDied","Data":"c6db19903808b928ba2f2d3fc0ba4f8108585a0550769184c81f68e28a16a1fa"} Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.090871 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.132705 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-9ntkz"] Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.142741 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-9ntkz"] Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.180104 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvqlc\" (UniqueName: \"kubernetes.io/projected/48c7c53d-f823-4f90-9db5-8463b8e33eb1-kube-api-access-mvqlc\") pod \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.180150 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48c7c53d-f823-4f90-9db5-8463b8e33eb1-host\") pod \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\" (UID: \"48c7c53d-f823-4f90-9db5-8463b8e33eb1\") " Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.180433 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48c7c53d-f823-4f90-9db5-8463b8e33eb1-host" (OuterVolumeSpecName: "host") pod "48c7c53d-f823-4f90-9db5-8463b8e33eb1" (UID: "48c7c53d-f823-4f90-9db5-8463b8e33eb1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.181503 4863 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48c7c53d-f823-4f90-9db5-8463b8e33eb1-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.185866 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48c7c53d-f823-4f90-9db5-8463b8e33eb1-kube-api-access-mvqlc" (OuterVolumeSpecName: "kube-api-access-mvqlc") pod "48c7c53d-f823-4f90-9db5-8463b8e33eb1" (UID: "48c7c53d-f823-4f90-9db5-8463b8e33eb1"). InnerVolumeSpecName "kube-api-access-mvqlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.283928 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvqlc\" (UniqueName: \"kubernetes.io/projected/48c7c53d-f823-4f90-9db5-8463b8e33eb1-kube-api-access-mvqlc\") on node \"crc\" DevicePath \"\"" Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.615312 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48c7c53d-f823-4f90-9db5-8463b8e33eb1" path="/var/lib/kubelet/pods/48c7c53d-f823-4f90-9db5-8463b8e33eb1/volumes" Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.738186 4863 scope.go:117] "RemoveContainer" containerID="c6db19903808b928ba2f2d3fc0ba4f8108585a0550769184c81f68e28a16a1fa" Dec 05 09:29:50 crc kubenswrapper[4863]: I1205 09:29:50.738210 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-9ntkz" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.291183 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-94ttc"] Dec 05 09:29:51 crc kubenswrapper[4863]: E1205 09:29:51.291966 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c7c53d-f823-4f90-9db5-8463b8e33eb1" containerName="container-00" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.291981 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c7c53d-f823-4f90-9db5-8463b8e33eb1" containerName="container-00" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.292178 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="48c7c53d-f823-4f90-9db5-8463b8e33eb1" containerName="container-00" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.292930 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.409547 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/868b95aa-1f78-4645-b9cb-1863e88b96b0-host\") pod \"crc-debug-94ttc\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.410062 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6w2p\" (UniqueName: \"kubernetes.io/projected/868b95aa-1f78-4645-b9cb-1863e88b96b0-kube-api-access-f6w2p\") pod \"crc-debug-94ttc\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.512875 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/868b95aa-1f78-4645-b9cb-1863e88b96b0-host\") pod \"crc-debug-94ttc\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.512954 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/868b95aa-1f78-4645-b9cb-1863e88b96b0-host\") pod \"crc-debug-94ttc\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.512996 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6w2p\" (UniqueName: \"kubernetes.io/projected/868b95aa-1f78-4645-b9cb-1863e88b96b0-kube-api-access-f6w2p\") pod \"crc-debug-94ttc\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:51 crc kubenswrapper[4863]: I1205 09:29:51.979166 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6w2p\" (UniqueName: \"kubernetes.io/projected/868b95aa-1f78-4645-b9cb-1863e88b96b0-kube-api-access-f6w2p\") pod \"crc-debug-94ttc\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:52 crc kubenswrapper[4863]: I1205 09:29:52.214266 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:52 crc kubenswrapper[4863]: I1205 09:29:52.760175 4863 generic.go:334] "Generic (PLEG): container finished" podID="868b95aa-1f78-4645-b9cb-1863e88b96b0" containerID="ab7ae691eeb5ba9fc15fa3b1c8e596d5de6555dcee85ed8bc7ab9277c9fa45ee" exitCode=0 Dec 05 09:29:52 crc kubenswrapper[4863]: I1205 09:29:52.760266 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/crc-debug-94ttc" event={"ID":"868b95aa-1f78-4645-b9cb-1863e88b96b0","Type":"ContainerDied","Data":"ab7ae691eeb5ba9fc15fa3b1c8e596d5de6555dcee85ed8bc7ab9277c9fa45ee"} Dec 05 09:29:52 crc kubenswrapper[4863]: I1205 09:29:52.760616 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/crc-debug-94ttc" event={"ID":"868b95aa-1f78-4645-b9cb-1863e88b96b0","Type":"ContainerStarted","Data":"8e72fb327e688dbcd18b51c6bc5f40e44805aa03a48a88cf10a598ac225669bb"} Dec 05 09:29:53 crc kubenswrapper[4863]: I1205 09:29:53.466425 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-94ttc"] Dec 05 09:29:53 crc kubenswrapper[4863]: I1205 09:29:53.480150 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-94ttc"] Dec 05 09:29:53 crc kubenswrapper[4863]: I1205 09:29:53.872517 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:53 crc kubenswrapper[4863]: I1205 09:29:53.957658 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6w2p\" (UniqueName: \"kubernetes.io/projected/868b95aa-1f78-4645-b9cb-1863e88b96b0-kube-api-access-f6w2p\") pod \"868b95aa-1f78-4645-b9cb-1863e88b96b0\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " Dec 05 09:29:53 crc kubenswrapper[4863]: I1205 09:29:53.957709 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/868b95aa-1f78-4645-b9cb-1863e88b96b0-host\") pod \"868b95aa-1f78-4645-b9cb-1863e88b96b0\" (UID: \"868b95aa-1f78-4645-b9cb-1863e88b96b0\") " Dec 05 09:29:53 crc kubenswrapper[4863]: I1205 09:29:53.958139 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/868b95aa-1f78-4645-b9cb-1863e88b96b0-host" (OuterVolumeSpecName: "host") pod "868b95aa-1f78-4645-b9cb-1863e88b96b0" (UID: "868b95aa-1f78-4645-b9cb-1863e88b96b0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:29:53 crc kubenswrapper[4863]: I1205 09:29:53.962859 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/868b95aa-1f78-4645-b9cb-1863e88b96b0-kube-api-access-f6w2p" (OuterVolumeSpecName: "kube-api-access-f6w2p") pod "868b95aa-1f78-4645-b9cb-1863e88b96b0" (UID: "868b95aa-1f78-4645-b9cb-1863e88b96b0"). InnerVolumeSpecName "kube-api-access-f6w2p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.059923 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6w2p\" (UniqueName: \"kubernetes.io/projected/868b95aa-1f78-4645-b9cb-1863e88b96b0-kube-api-access-f6w2p\") on node \"crc\" DevicePath \"\"" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.059967 4863 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/868b95aa-1f78-4645-b9cb-1863e88b96b0-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.615400 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="868b95aa-1f78-4645-b9cb-1863e88b96b0" path="/var/lib/kubelet/pods/868b95aa-1f78-4645-b9cb-1863e88b96b0/volumes" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.682338 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-kx2dx"] Dec 05 09:29:54 crc kubenswrapper[4863]: E1205 09:29:54.682794 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="868b95aa-1f78-4645-b9cb-1863e88b96b0" containerName="container-00" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.682817 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="868b95aa-1f78-4645-b9cb-1863e88b96b0" containerName="container-00" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.683116 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="868b95aa-1f78-4645-b9cb-1863e88b96b0" containerName="container-00" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.683877 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.774628 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-host\") pod \"crc-debug-kx2dx\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.775023 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw9wd\" (UniqueName: \"kubernetes.io/projected/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-kube-api-access-fw9wd\") pod \"crc-debug-kx2dx\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.780869 4863 scope.go:117] "RemoveContainer" containerID="ab7ae691eeb5ba9fc15fa3b1c8e596d5de6555dcee85ed8bc7ab9277c9fa45ee" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.780913 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-94ttc" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.876629 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-host\") pod \"crc-debug-kx2dx\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.876699 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw9wd\" (UniqueName: \"kubernetes.io/projected/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-kube-api-access-fw9wd\") pod \"crc-debug-kx2dx\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.876787 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-host\") pod \"crc-debug-kx2dx\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:54 crc kubenswrapper[4863]: I1205 09:29:54.894539 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw9wd\" (UniqueName: \"kubernetes.io/projected/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-kube-api-access-fw9wd\") pod \"crc-debug-kx2dx\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:55 crc kubenswrapper[4863]: I1205 09:29:55.002117 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:55 crc kubenswrapper[4863]: W1205 09:29:55.029261 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e87e893_1dce_47d7_ad6d_3a1c92fc8232.slice/crio-51fede02b82b06061ab8885c461ad8888528f04fd85207e8d5ee8a4d883954a6 WatchSource:0}: Error finding container 51fede02b82b06061ab8885c461ad8888528f04fd85207e8d5ee8a4d883954a6: Status 404 returned error can't find the container with id 51fede02b82b06061ab8885c461ad8888528f04fd85207e8d5ee8a4d883954a6 Dec 05 09:29:55 crc kubenswrapper[4863]: I1205 09:29:55.794732 4863 generic.go:334] "Generic (PLEG): container finished" podID="2e87e893-1dce-47d7-ad6d-3a1c92fc8232" containerID="cd3698d45cd273050cbf01eef3137008744cc153b2a174c743c5950b91b5193b" exitCode=0 Dec 05 09:29:55 crc kubenswrapper[4863]: I1205 09:29:55.794830 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" event={"ID":"2e87e893-1dce-47d7-ad6d-3a1c92fc8232","Type":"ContainerDied","Data":"cd3698d45cd273050cbf01eef3137008744cc153b2a174c743c5950b91b5193b"} Dec 05 09:29:55 crc kubenswrapper[4863]: I1205 09:29:55.795551 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" event={"ID":"2e87e893-1dce-47d7-ad6d-3a1c92fc8232","Type":"ContainerStarted","Data":"51fede02b82b06061ab8885c461ad8888528f04fd85207e8d5ee8a4d883954a6"} Dec 05 09:29:55 crc kubenswrapper[4863]: I1205 09:29:55.841116 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-kx2dx"] Dec 05 09:29:55 crc kubenswrapper[4863]: I1205 09:29:55.854341 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8jrh/crc-debug-kx2dx"] Dec 05 09:29:56 crc 
kubenswrapper[4863]: I1205 09:29:56.919779 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.019701 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw9wd\" (UniqueName: \"kubernetes.io/projected/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-kube-api-access-fw9wd\") pod \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.019876 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-host\") pod \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\" (UID: \"2e87e893-1dce-47d7-ad6d-3a1c92fc8232\") " Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.020038 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-host" (OuterVolumeSpecName: "host") pod "2e87e893-1dce-47d7-ad6d-3a1c92fc8232" (UID: "2e87e893-1dce-47d7-ad6d-3a1c92fc8232"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.020868 4863 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.025528 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-kube-api-access-fw9wd" (OuterVolumeSpecName: "kube-api-access-fw9wd") pod "2e87e893-1dce-47d7-ad6d-3a1c92fc8232" (UID: "2e87e893-1dce-47d7-ad6d-3a1c92fc8232"). InnerVolumeSpecName "kube-api-access-fw9wd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.123304 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw9wd\" (UniqueName: \"kubernetes.io/projected/2e87e893-1dce-47d7-ad6d-3a1c92fc8232-kube-api-access-fw9wd\") on node \"crc\" DevicePath \"\"" Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.825128 4863 scope.go:117] "RemoveContainer" containerID="cd3698d45cd273050cbf01eef3137008744cc153b2a174c743c5950b91b5193b" Dec 05 09:29:57 crc kubenswrapper[4863]: I1205 09:29:57.825427 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8jrh/crc-debug-kx2dx" Dec 05 09:29:58 crc kubenswrapper[4863]: I1205 09:29:58.616500 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e87e893-1dce-47d7-ad6d-3a1c92fc8232" path="/var/lib/kubelet/pods/2e87e893-1dce-47d7-ad6d-3a1c92fc8232/volumes" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.153855 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h"] Dec 05 09:30:00 crc kubenswrapper[4863]: E1205 09:30:00.154711 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e87e893-1dce-47d7-ad6d-3a1c92fc8232" containerName="container-00" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.154729 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e87e893-1dce-47d7-ad6d-3a1c92fc8232" containerName="container-00" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.154992 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e87e893-1dce-47d7-ad6d-3a1c92fc8232" containerName="container-00" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.155822 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.157849 4863 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.159965 4863 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.178072 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h"] Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.187368 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c806ba4-ffe3-4e79-9777-7276fff23b66-secret-volume\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.187680 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c806ba4-ffe3-4e79-9777-7276fff23b66-config-volume\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.187853 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrqkp\" (UniqueName: \"kubernetes.io/projected/9c806ba4-ffe3-4e79-9777-7276fff23b66-kube-api-access-hrqkp\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.290250 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c806ba4-ffe3-4e79-9777-7276fff23b66-secret-volume\") pod \"collect-profiles-29415450-9jf6h\" (UID: 
\"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.290319 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c806ba4-ffe3-4e79-9777-7276fff23b66-config-volume\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.290436 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrqkp\" (UniqueName: \"kubernetes.io/projected/9c806ba4-ffe3-4e79-9777-7276fff23b66-kube-api-access-hrqkp\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.291592 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c806ba4-ffe3-4e79-9777-7276fff23b66-config-volume\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.296605 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c806ba4-ffe3-4e79-9777-7276fff23b66-secret-volume\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.307335 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrqkp\" (UniqueName: \"kubernetes.io/projected/9c806ba4-ffe3-4e79-9777-7276fff23b66-kube-api-access-hrqkp\") pod \"collect-profiles-29415450-9jf6h\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.479459 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:00 crc kubenswrapper[4863]: I1205 09:30:00.940313 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h"] Dec 05 09:30:01 crc kubenswrapper[4863]: I1205 09:30:01.867684 4863 generic.go:334] "Generic (PLEG): container finished" podID="9c806ba4-ffe3-4e79-9777-7276fff23b66" containerID="22a4cbe8204d5b382d13a20a4f0c8399704e70c2a29b35ce33e5ef1d007744f9" exitCode=0 Dec 05 09:30:01 crc kubenswrapper[4863]: I1205 09:30:01.867823 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" event={"ID":"9c806ba4-ffe3-4e79-9777-7276fff23b66","Type":"ContainerDied","Data":"22a4cbe8204d5b382d13a20a4f0c8399704e70c2a29b35ce33e5ef1d007744f9"} Dec 05 09:30:01 crc kubenswrapper[4863]: I1205 09:30:01.868019 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" event={"ID":"9c806ba4-ffe3-4e79-9777-7276fff23b66","Type":"ContainerStarted","Data":"6f6ac9b333a0d7a60d428800c7f3ed3ff5d456e2de64bb7c8f67f4bb9791ca6d"} Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.269522 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.358337 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c806ba4-ffe3-4e79-9777-7276fff23b66-config-volume\") pod \"9c806ba4-ffe3-4e79-9777-7276fff23b66\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.358499 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c806ba4-ffe3-4e79-9777-7276fff23b66-secret-volume\") pod \"9c806ba4-ffe3-4e79-9777-7276fff23b66\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.358629 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrqkp\" (UniqueName: \"kubernetes.io/projected/9c806ba4-ffe3-4e79-9777-7276fff23b66-kube-api-access-hrqkp\") pod \"9c806ba4-ffe3-4e79-9777-7276fff23b66\" (UID: \"9c806ba4-ffe3-4e79-9777-7276fff23b66\") " Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.359203 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c806ba4-ffe3-4e79-9777-7276fff23b66-config-volume" (OuterVolumeSpecName: "config-volume") pod "9c806ba4-ffe3-4e79-9777-7276fff23b66" (UID: "9c806ba4-ffe3-4e79-9777-7276fff23b66"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.364850 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c806ba4-ffe3-4e79-9777-7276fff23b66-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9c806ba4-ffe3-4e79-9777-7276fff23b66" (UID: "9c806ba4-ffe3-4e79-9777-7276fff23b66"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.365112 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c806ba4-ffe3-4e79-9777-7276fff23b66-kube-api-access-hrqkp" (OuterVolumeSpecName: "kube-api-access-hrqkp") pod "9c806ba4-ffe3-4e79-9777-7276fff23b66" (UID: "9c806ba4-ffe3-4e79-9777-7276fff23b66"). InnerVolumeSpecName "kube-api-access-hrqkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.465582 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrqkp\" (UniqueName: \"kubernetes.io/projected/9c806ba4-ffe3-4e79-9777-7276fff23b66-kube-api-access-hrqkp\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.465626 4863 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c806ba4-ffe3-4e79-9777-7276fff23b66-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.465637 4863 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c806ba4-ffe3-4e79-9777-7276fff23b66-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.888830 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" event={"ID":"9c806ba4-ffe3-4e79-9777-7276fff23b66","Type":"ContainerDied","Data":"6f6ac9b333a0d7a60d428800c7f3ed3ff5d456e2de64bb7c8f67f4bb9791ca6d"} Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.889092 4863 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f6ac9b333a0d7a60d428800c7f3ed3ff5d456e2de64bb7c8f67f4bb9791ca6d" Dec 05 09:30:03 crc kubenswrapper[4863]: I1205 09:30:03.888877 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-9jf6h" Dec 05 09:30:04 crc kubenswrapper[4863]: I1205 09:30:04.359686 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t"] Dec 05 09:30:04 crc kubenswrapper[4863]: I1205 09:30:04.373492 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-5h42t"] Dec 05 09:30:04 crc kubenswrapper[4863]: I1205 09:30:04.621644 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88c64e5a-7c28-49a0-9c42-6b2596062da9" path="/var/lib/kubelet/pods/88c64e5a-7c28-49a0-9c42-6b2596062da9/volumes" Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.463838 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.464413 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.464453 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.465196 4863 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.465247 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" gracePeriod=600 Dec 05 09:30:08 crc kubenswrapper[4863]: E1205 09:30:08.585314 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.943175 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" exitCode=0 Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.943236 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" 
event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503"} Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.943279 4863 scope.go:117] "RemoveContainer" containerID="fa35828f060b9587aee6082e6860ebf01b2588f7123c52cd4516a0cba1d3833a" Dec 05 09:30:08 crc kubenswrapper[4863]: I1205 09:30:08.944449 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:30:08 crc kubenswrapper[4863]: E1205 09:30:08.945028 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:30:19 crc kubenswrapper[4863]: I1205 09:30:19.602491 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:30:19 crc kubenswrapper[4863]: E1205 09:30:19.603189 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:30:31 crc kubenswrapper[4863]: I1205 09:30:31.602098 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:30:31 crc kubenswrapper[4863]: E1205 09:30:31.602883 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:30:41 crc kubenswrapper[4863]: I1205 09:30:41.465045 4863 scope.go:117] "RemoveContainer" containerID="7f6b45be49405f6a5ec2501171db0cd253b94c8f42c5a50938c66cfdf6237420" Dec 05 09:30:43 crc kubenswrapper[4863]: I1205 09:30:43.601782 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:30:43 crc kubenswrapper[4863]: E1205 09:30:43.602724 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:30:58 crc kubenswrapper[4863]: I1205 09:30:58.602756 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:30:58 crc kubenswrapper[4863]: E1205 09:30:58.603605 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:31:13 crc kubenswrapper[4863]: I1205 09:31:13.602022 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:31:13 crc kubenswrapper[4863]: E1205 09:31:13.602794 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:31:27 crc kubenswrapper[4863]: I1205 09:31:27.601545 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:31:27 crc kubenswrapper[4863]: E1205 09:31:27.602549 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:31:40 crc kubenswrapper[4863]: I1205 09:31:40.601674 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:31:40 crc kubenswrapper[4863]: E1205 09:31:40.602652 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:31:51 crc kubenswrapper[4863]: I1205 09:31:51.602383 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:31:51 crc kubenswrapper[4863]: E1205 09:31:51.603388 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:32:04 crc kubenswrapper[4863]: I1205 09:32:04.602662 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:32:04 crc kubenswrapper[4863]: E1205 09:32:04.603309 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.231574 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c3190ef6-e28b-43a9-bcaa-ab22eacb8142/init-config-reloader/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.490785 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c3190ef6-e28b-43a9-bcaa-ab22eacb8142/config-reloader/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.516085 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c3190ef6-e28b-43a9-bcaa-ab22eacb8142/init-config-reloader/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.518092 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c3190ef6-e28b-43a9-bcaa-ab22eacb8142/alertmanager/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.736241 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f665fbfc-2d49-498b-84f7-4f50900b8752/aodh-listener/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.754536 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f665fbfc-2d49-498b-84f7-4f50900b8752/aodh-evaluator/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.770208 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f665fbfc-2d49-498b-84f7-4f50900b8752/aodh-api/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.936902 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f665fbfc-2d49-498b-84f7-4f50900b8752/aodh-notifier/0.log" Dec 05 09:32:13 crc kubenswrapper[4863]: I1205 09:32:13.997355 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-867846d5d6-vrk5r_793c6422-8973-4613-aab9-56481761b45f/barbican-api/0.log" Dec 05 09:32:14 crc kubenswrapper[4863]: I1205 09:32:14.065403 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-867846d5d6-vrk5r_793c6422-8973-4613-aab9-56481761b45f/barbican-api-log/0.log" Dec 05 09:32:14 crc kubenswrapper[4863]: I1205 09:32:14.192998 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5546755c6b-ncfhj_9bc7dddf-4b40-4664-b820-2de712eab7b2/barbican-keystone-listener/0.log" Dec 05 09:32:14 crc kubenswrapper[4863]: I1205 09:32:14.457326 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-85b58948c5-nhcqr_41826e68-cab3-4923-bd7f-ff6364c1c910/barbican-worker/0.log" Dec 05 09:32:14 crc kubenswrapper[4863]: I1205 09:32:14.584903 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-85b58948c5-nhcqr_41826e68-cab3-4923-bd7f-ff6364c1c910/barbican-worker-log/0.log" Dec 05 09:32:14 crc kubenswrapper[4863]: I1205 09:32:14.671541 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5546755c6b-ncfhj_9bc7dddf-4b40-4664-b820-2de712eab7b2/barbican-keystone-listener-log/0.log" Dec 05 09:32:14 crc kubenswrapper[4863]: I1205 09:32:14.797346 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-hktm5_f1986f6c-07f7-4f07-8fac-a10054e34670/bootstrap-openstack-openstack-cell1/0.log" Dec 05 09:32:14 crc kubenswrapper[4863]: I1205 09:32:14.931192 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0e91d28d-aeed-46b7-9579-327651062fb6/ceilometer-central-agent/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.014217 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0e91d28d-aeed-46b7-9579-327651062fb6/ceilometer-notification-agent/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.040531 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0e91d28d-aeed-46b7-9579-327651062fb6/proxy-httpd/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.111809 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0e91d28d-aeed-46b7-9579-327651062fb6/sg-core/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.236041 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-tftgl_fbf27c93-62df-46a6-95c8-a3438e563849/ceph-client-openstack-openstack-cell1/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.384383 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_bc458e97-df3a-4597-b232-4508ce0a666c/cinder-api/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.397832 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_bc458e97-df3a-4597-b232-4508ce0a666c/cinder-api-log/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.628729 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_c1b0c15e-d09a-46fa-9a07-15665a5ae23e/cinder-backup/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.692784 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_c1b0c15e-d09a-46fa-9a07-15665a5ae23e/probe/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.736044 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b73ecb8a-f56c-4ddf-ae08-a7514506f310/cinder-scheduler/0.log" Dec 05 09:32:15 crc kubenswrapper[4863]: I1205 09:32:15.976575 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b73ecb8a-f56c-4ddf-ae08-a7514506f310/probe/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.073720 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_e8126d52-8f00-4eac-9feb-359fb635f044/cinder-volume/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.080397 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_e8126d52-8f00-4eac-9feb-359fb635f044/probe/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.270821 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-hqvvh_aeaedb49-cd07-46e7-934f-f1b05a66d43e/configure-network-openstack-openstack-cell1/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.320749 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-m9fbt_956afa12-575f-40e0-af41-fbb3accd6ad5/configure-os-openstack-openstack-cell1/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.544598 4863 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-698794d99-lq2mw_e381f105-1a3a-4d04-a429-3e1627de35de/init/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.815222 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-698794d99-lq2mw_e381f105-1a3a-4d04-a429-3e1627de35de/init/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.821807 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-698794d99-lq2mw_e381f105-1a3a-4d04-a429-3e1627de35de/dnsmasq-dns/0.log" Dec 05 09:32:16 crc kubenswrapper[4863]: I1205 09:32:16.884861 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-4k99f_61304558-f044-4fd0-82f7-d2533e210fc2/download-cache-openstack-openstack-cell1/0.log" Dec 05 09:32:17 crc kubenswrapper[4863]: I1205 09:32:17.152216 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a65d4bd4-3e15-4293-a421-bbc89fbbdab8/glance-httpd/0.log" Dec 05 09:32:17 crc kubenswrapper[4863]: I1205 09:32:17.233591 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a65d4bd4-3e15-4293-a421-bbc89fbbdab8/glance-log/0.log" Dec 05 09:32:17 crc kubenswrapper[4863]: I1205 09:32:17.418906 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_87e9e170-a556-46aa-8d0a-b0b1fb6328ab/glance-httpd/0.log" Dec 05 09:32:17 crc kubenswrapper[4863]: I1205 09:32:17.458059 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_87e9e170-a556-46aa-8d0a-b0b1fb6328ab/glance-log/0.log" Dec 05 09:32:17 crc kubenswrapper[4863]: I1205 09:32:17.644735 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-598f986844-4pl5l_3d698629-4fde-46f2-a374-11418a9c99a6/heat-api/0.log" Dec 05 09:32:17 crc kubenswrapper[4863]: I1205 09:32:17.908157 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-6f444c5f8b-2b4vc_8a6e1dc6-dc91-4697-a550-5058f0a7fc98/heat-cfnapi/0.log" Dec 05 09:32:17 crc kubenswrapper[4863]: I1205 09:32:17.918714 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-7cdbc8777d-ww6sx_76c4aa64-a697-4589-8cf1-cfe55095bf12/heat-engine/0.log" Dec 05 09:32:18 crc kubenswrapper[4863]: I1205 09:32:18.096334 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7c8786bdb9-lfhbp_219bf904-4af3-4f1e-a1e9-97a72fcf6c0f/horizon/0.log" Dec 05 09:32:18 crc kubenswrapper[4863]: I1205 09:32:18.168270 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7c8786bdb9-lfhbp_219bf904-4af3-4f1e-a1e9-97a72fcf6c0f/horizon-log/0.log" Dec 05 09:32:18 crc kubenswrapper[4863]: I1205 09:32:18.185186 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-zfd9m_54d41b89-e5bc-4b3a-b675-78b3eed49d1d/install-certs-openstack-openstack-cell1/0.log" Dec 05 09:32:18 crc kubenswrapper[4863]: I1205 09:32:18.469286 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-q4p64_bf7632e1-1460-44a9-95ab-45f0b0b94728/install-os-openstack-openstack-cell1/0.log" Dec 05 09:32:18 crc kubenswrapper[4863]: I1205 09:32:18.852062 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-7cbdfccb-4r9s8_e86a0dbe-8a04-4f1d-8ed9-42ab258b4a0a/keystone-api/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.048387 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415421-vl2nr_d9c7f0b6-16f9-4aed-82cc-df709176bed3/keystone-cron/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.140352 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_413bfba6-563e-4617-90a0-de3e47ee0ef3/kube-state-metrics/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.189952 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-wwqmh_c16402f6-3e26-4148-9d4d-cce6fae37061/libvirt-openstack-openstack-cell1/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.310301 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_ff86e47b-ffeb-4075-a297-28f8096416c8/manila-api-log/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.506625 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_ff86e47b-ffeb-4075-a297-28f8096416c8/manila-api/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.507204 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_79867749-f1a2-4945-bc5a-8f58300da928/manila-scheduler/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.552178 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_79867749-f1a2-4945-bc5a-8f58300da928/probe/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.602096 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:32:19 crc kubenswrapper[4863]: E1205 09:32:19.602409 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.660951 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_4b64b9d9-479f-40e8-9ae2-82f00263fe59/manila-share/0.log" Dec 05 09:32:19 crc kubenswrapper[4863]: I1205 09:32:19.746512 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_4b64b9d9-479f-40e8-9ae2-82f00263fe59/probe/0.log" Dec 05 09:32:20 crc kubenswrapper[4863]: I1205 09:32:20.261698 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-4t74k_52510c93-a879-4c02-94b8-9142a9451a26/neutron-dhcp-openstack-openstack-cell1/0.log" Dec 05 09:32:20 crc kubenswrapper[4863]: I1205 09:32:20.263526 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-ccd7c9685-8g5gr_bc84c40c-704a-41c4-a018-c74351bc4227/neutron-httpd/0.log" Dec 05 09:32:20 crc kubenswrapper[4863]: I1205 09:32:20.496046 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-ccd7c9685-8g5gr_bc84c40c-704a-41c4-a018-c74351bc4227/neutron-api/0.log" Dec 05 09:32:20 crc kubenswrapper[4863]: I1205 09:32:20.612919 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-8r6vz_48649f07-ecaf-4827-bd55-1b03c8d8a53b/neutron-metadata-openstack-openstack-cell1/0.log" Dec 05 09:32:20 crc kubenswrapper[4863]: I1205 09:32:20.782749 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-jn22z_1e5562f8-cb23-40ea-a9a3-4de4f8ee6567/neutron-sriov-openstack-openstack-cell1/0.log" Dec 05 09:32:20 crc kubenswrapper[4863]: I1205 09:32:20.969646 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_68c101ff-556e-4b29-8b6c-1d38a8b51afe/nova-api-api/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.143804 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_68c101ff-556e-4b29-8b6c-1d38a8b51afe/nova-api-log/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.193260 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_eb6088a8-bcfe-47a9-9808-6bfaf328e4fa/nova-cell0-conductor-conductor/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.396753 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_8f7c4589-4fe4-4bf5-afbc-fde8e6d3985a/nova-cell1-conductor-conductor/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.540238 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_70237596-6b95-48be-825a-c52559057fe9/memcached/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.563266 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_269bb40a-af4e-435f-93b8-6a8db7891dfa/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.755427 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellqpl2d_c0e462cb-31a4-47c8-8d0a-1bd2044719b0/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.906959 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34/nova-metadata-log/0.log" Dec 05 09:32:21 crc kubenswrapper[4863]: I1205 09:32:21.916437 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-vjm8z_d7ea9e9b-e21d-4a97-a1ff-e5be88487c36/nova-cell1-openstack-openstack-cell1/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.312011 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c20b40d0-0d53-4f0b-8ee6-ba7ed886ac34/nova-metadata-metadata/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.397822 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9453bfe6-ca55-416b-8f61-76557fad23aa/mysql-bootstrap/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.461227 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_cbb3dd65-ec81-45b1-9bc0-83466df03e4c/nova-scheduler-scheduler/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.604306 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9453bfe6-ca55-416b-8f61-76557fad23aa/mysql-bootstrap/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.620117 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_9453bfe6-ca55-416b-8f61-76557fad23aa/galera/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.687346 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_47e708f8-fade-40dd-852c-8f6e08b8db54/mysql-bootstrap/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.894872 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_47e708f8-fade-40dd-852c-8f6e08b8db54/mysql-bootstrap/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.917955 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_47e708f8-fade-40dd-852c-8f6e08b8db54/galera/0.log" Dec 05 09:32:22 crc kubenswrapper[4863]: I1205 09:32:22.933795 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a69b1ece-1229-466e-9427-08948d0e1144/openstackclient/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.122701 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0abe8166-fcb3-43c9-894f-36def72e5dde/ovn-northd/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.165546 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0abe8166-fcb3-43c9-894f-36def72e5dde/openstack-network-exporter/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.289851 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-l64s5_ecd68f58-3555-4f34-98a7-f833c7b3514f/ovn-openstack-openstack-cell1/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.351505 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3900cd64-7e39-475d-a10d-aacc603edd0e/ovsdbserver-nb/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.392549 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3900cd64-7e39-475d-a10d-aacc603edd0e/openstack-network-exporter/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.504297 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3/openstack-network-exporter/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.553368 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_5213b0ea-3f9d-4df5-a3bd-2eb3330a6da3/ovsdbserver-nb/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.562892 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_c3c00709-2068-4e67-875b-63e1686cbac0/openstack-network-exporter/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.733745 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_c3c00709-2068-4e67-875b-63e1686cbac0/ovsdbserver-nb/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.805595 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a2103d64-1d3c-4b23-8d0e-f75fb68ac54c/ovsdbserver-sb/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.814756 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a2103d64-1d3c-4b23-8d0e-f75fb68ac54c/openstack-network-exporter/0.log" Dec 05 09:32:23 crc kubenswrapper[4863]: I1205 09:32:23.978866 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-1_c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734/openstack-network-exporter/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.075405 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_c0211a31-33ca-4ef2-a7ca-482e4506e8dc/openstack-network-exporter/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.077517 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_c9b0e25d-1c6c-43dd-84f7-cbe0b62d3734/ovsdbserver-sb/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.189035 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_c0211a31-33ca-4ef2-a7ca-482e4506e8dc/ovsdbserver-sb/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.371130 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f7bc9655b-tknf7_d552ffcf-15fc-41cb-9a16-632cccb1ca19/placement-api/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.432306 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f7bc9655b-tknf7_d552ffcf-15fc-41cb-9a16-632cccb1ca19/placement-log/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.434608 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-c549tp_178bee9c-f288-4ca1-ac2a-d8295a776458/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.608741 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_0923802c-8b5d-46ef-b409-ad5f7959ff09/init-config-reloader/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.814315 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_0923802c-8b5d-46ef-b409-ad5f7959ff09/config-reloader/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.854248 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_0923802c-8b5d-46ef-b409-ad5f7959ff09/thanos-sidecar/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.859771 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_0923802c-8b5d-46ef-b409-ad5f7959ff09/prometheus/0.log" Dec 05 09:32:24 crc kubenswrapper[4863]: I1205 09:32:24.890193 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_0923802c-8b5d-46ef-b409-ad5f7959ff09/init-config-reloader/0.log" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.019196 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f62d64b0-f790-4c51-9777-6141cbba6a79/setup-container/0.log" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.102037 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p22xz"] Dec 05 09:32:25 crc kubenswrapper[4863]: E1205 09:32:25.102570 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c806ba4-ffe3-4e79-9777-7276fff23b66" containerName="collect-profiles" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.102593 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c806ba4-ffe3-4e79-9777-7276fff23b66" containerName="collect-profiles" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.102838 4863 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="9c806ba4-ffe3-4e79-9777-7276fff23b66" containerName="collect-profiles" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.104447 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.122250 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-catalog-content\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.122386 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-utilities\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.122430 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw5sm\" (UniqueName: \"kubernetes.io/projected/fd4052ac-e5da-496f-859a-5754c6748098-kube-api-access-zw5sm\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.135026 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p22xz"] Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.224559 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-utilities\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.224637 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw5sm\" (UniqueName: \"kubernetes.io/projected/fd4052ac-e5da-496f-859a-5754c6748098-kube-api-access-zw5sm\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.224735 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-catalog-content\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.225392 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-catalog-content\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.225628 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-utilities\") pod \"redhat-marketplace-p22xz\" (UID: 
\"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.245715 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw5sm\" (UniqueName: \"kubernetes.io/projected/fd4052ac-e5da-496f-859a-5754c6748098-kube-api-access-zw5sm\") pod \"redhat-marketplace-p22xz\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.427404 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.632117 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f62d64b0-f790-4c51-9777-6141cbba6a79/rabbitmq/0.log" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.635099 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b59a534f-9073-4dd4-aaae-75fcde18ffda/setup-container/0.log" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.708034 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f62d64b0-f790-4c51-9777-6141cbba6a79/setup-container/0.log" Dec 05 09:32:25 crc kubenswrapper[4863]: I1205 09:32:25.939914 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b59a534f-9073-4dd4-aaae-75fcde18ffda/setup-container/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.039943 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-9pbtw_20f4cfb0-2e67-48ce-9e9c-dda9ef756746/reboot-os-openstack-openstack-cell1/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.067312 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b59a534f-9073-4dd4-aaae-75fcde18ffda/rabbitmq/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.081709 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p22xz"] Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.163948 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-chtq4_4e547abc-412f-415b-ae64-4df536300d7d/run-os-openstack-openstack-cell1/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.326254 4863 generic.go:334] "Generic (PLEG): container finished" podID="fd4052ac-e5da-496f-859a-5754c6748098" containerID="5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353" exitCode=0 Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.326326 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p22xz" event={"ID":"fd4052ac-e5da-496f-859a-5754c6748098","Type":"ContainerDied","Data":"5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353"} Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.326376 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p22xz" event={"ID":"fd4052ac-e5da-496f-859a-5754c6748098","Type":"ContainerStarted","Data":"8fb006bf70fe18f541a683e87f3b781848295c109bbe19cb90ff57cd9cc10895"} Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.327349 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ssh-known-hosts-openstack-rqfjv_02944a5b-402f-48e5-9c09-f39051d9a0d4/ssh-known-hosts-openstack/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.332766 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.407261 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-4sf42_084f48bc-7cc9-4c09-862f-e0218ee9087a/telemetry-openstack-openstack-cell1/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.596793 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_fefafcc8-4071-43dd-8d8e-c40f20901efc/tempest-tests-tempest-tests-runner/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.638730 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_00325ca5-c753-4d51-9d93-45f8d36f8535/test-operator-logs-container/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.757365 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-snshz_117a0ebe-6023-41c5-b416-f583e46b4ce4/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Dec 05 09:32:26 crc kubenswrapper[4863]: I1205 09:32:26.858592 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-74xzf_8dddadd9-2948-426d-80e5-a47c45569ec6/validate-network-openstack-openstack-cell1/0.log" Dec 05 09:32:28 crc kubenswrapper[4863]: I1205 09:32:28.348344 4863 generic.go:334] "Generic (PLEG): container finished" podID="fd4052ac-e5da-496f-859a-5754c6748098" containerID="1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062" exitCode=0 Dec 05 09:32:28 crc kubenswrapper[4863]: I1205 09:32:28.348427 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p22xz" event={"ID":"fd4052ac-e5da-496f-859a-5754c6748098","Type":"ContainerDied","Data":"1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062"} Dec 05 09:32:29 crc kubenswrapper[4863]: I1205 09:32:29.362893 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p22xz" event={"ID":"fd4052ac-e5da-496f-859a-5754c6748098","Type":"ContainerStarted","Data":"2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49"} Dec 05 09:32:29 crc kubenswrapper[4863]: I1205 09:32:29.392804 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p22xz" podStartSLOduration=1.956152026 podStartE2EDuration="4.392783875s" podCreationTimestamp="2025-12-05 09:32:25 +0000 UTC" firstStartedPulling="2025-12-05 09:32:26.332421573 +0000 UTC m=+9974.058418613" lastFinishedPulling="2025-12-05 09:32:28.769053422 +0000 UTC m=+9976.495050462" observedRunningTime="2025-12-05 09:32:29.383493278 +0000 UTC m=+9977.109490338" watchObservedRunningTime="2025-12-05 09:32:29.392783875 +0000 UTC m=+9977.118780915" Dec 05 09:32:32 crc kubenswrapper[4863]: I1205 09:32:32.611644 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:32:32 crc kubenswrapper[4863]: E1205 09:32:32.616652 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:32:35 crc kubenswrapper[4863]: I1205 09:32:35.428022 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:35 crc kubenswrapper[4863]: I1205 09:32:35.429426 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:35 crc kubenswrapper[4863]: I1205 09:32:35.490615 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:35 crc kubenswrapper[4863]: I1205 09:32:35.831424 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:35 crc kubenswrapper[4863]: I1205 09:32:35.913549 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p22xz"] Dec 05 09:32:37 crc kubenswrapper[4863]: I1205 09:32:37.795493 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p22xz" podUID="fd4052ac-e5da-496f-859a-5754c6748098" containerName="registry-server" containerID="cri-o://2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49" gracePeriod=2 Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.453609 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.525138 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-utilities\") pod \"fd4052ac-e5da-496f-859a-5754c6748098\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.525313 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-catalog-content\") pod \"fd4052ac-e5da-496f-859a-5754c6748098\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.525348 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw5sm\" (UniqueName: \"kubernetes.io/projected/fd4052ac-e5da-496f-859a-5754c6748098-kube-api-access-zw5sm\") pod \"fd4052ac-e5da-496f-859a-5754c6748098\" (UID: \"fd4052ac-e5da-496f-859a-5754c6748098\") " Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.526308 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-utilities" (OuterVolumeSpecName: "utilities") pod "fd4052ac-e5da-496f-859a-5754c6748098" (UID: "fd4052ac-e5da-496f-859a-5754c6748098"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.531816 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd4052ac-e5da-496f-859a-5754c6748098-kube-api-access-zw5sm" (OuterVolumeSpecName: "kube-api-access-zw5sm") pod "fd4052ac-e5da-496f-859a-5754c6748098" (UID: "fd4052ac-e5da-496f-859a-5754c6748098"). InnerVolumeSpecName "kube-api-access-zw5sm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.549259 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd4052ac-e5da-496f-859a-5754c6748098" (UID: "fd4052ac-e5da-496f-859a-5754c6748098"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.627560 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.627608 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw5sm\" (UniqueName: \"kubernetes.io/projected/fd4052ac-e5da-496f-859a-5754c6748098-kube-api-access-zw5sm\") on node \"crc\" DevicePath \"\"" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.627622 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4052ac-e5da-496f-859a-5754c6748098-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.807869 4863 generic.go:334] "Generic (PLEG): container finished" podID="fd4052ac-e5da-496f-859a-5754c6748098" containerID="2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49" exitCode=0 Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.807935 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p22xz" event={"ID":"fd4052ac-e5da-496f-859a-5754c6748098","Type":"ContainerDied","Data":"2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49"} Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.807966 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p22xz" event={"ID":"fd4052ac-e5da-496f-859a-5754c6748098","Type":"ContainerDied","Data":"8fb006bf70fe18f541a683e87f3b781848295c109bbe19cb90ff57cd9cc10895"} Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.807984 4863 scope.go:117] "RemoveContainer" containerID="2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.808132 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p22xz" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.833071 4863 scope.go:117] "RemoveContainer" containerID="1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.840933 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p22xz"] Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.852711 4863 scope.go:117] "RemoveContainer" containerID="5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.855360 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p22xz"] Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.910648 4863 scope.go:117] "RemoveContainer" containerID="2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49" Dec 05 09:32:38 crc kubenswrapper[4863]: E1205 09:32:38.911183 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49\": container with ID starting with 2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49 not found: ID does not exist" containerID="2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.911227 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49"} err="failed to get container status \"2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49\": rpc error: code = NotFound desc = could not find container \"2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49\": container with ID starting with 2c90e9d462b4638643c7bebda5fd72b2603d4325a067ebc53460eeb4aff6be49 not found: ID does not exist" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.911255 4863 scope.go:117] "RemoveContainer" containerID="1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062" Dec 05 09:32:38 crc kubenswrapper[4863]: E1205 09:32:38.911810 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062\": container with ID starting with 1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062 not found: ID does not exist" containerID="1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.911887 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062"} err="failed to get container status \"1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062\": rpc error: code = NotFound desc = could not find container \"1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062\": container with ID starting with 1abe283cae649e65da972eadd886b1f058650cd087d42cc0fb243b094d841062 not found: ID does not exist" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.911925 4863 scope.go:117] "RemoveContainer" containerID="5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353" Dec 05 09:32:38 crc kubenswrapper[4863]: E1205 09:32:38.912289 4863 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353\": container with ID starting with 5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353 not found: ID does not exist" containerID="5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353" Dec 05 09:32:38 crc kubenswrapper[4863]: I1205 09:32:38.912363 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353"} err="failed to get container status \"5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353\": rpc error: code = NotFound desc = could not find container \"5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353\": container with ID starting with 5348732a2e1e85f0a51fcd6d649ca6790c3ed97b6feefd3b0b15440ea1a65353 not found: ID does not exist" Dec 05 09:32:40 crc kubenswrapper[4863]: I1205 09:32:40.637929 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd4052ac-e5da-496f-859a-5754c6748098" path="/var/lib/kubelet/pods/fd4052ac-e5da-496f-859a-5754c6748098/volumes" Dec 05 09:32:44 crc kubenswrapper[4863]: I1205 09:32:44.602503 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:32:44 crc kubenswrapper[4863]: E1205 09:32:44.603325 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:32:52 crc kubenswrapper[4863]: I1205 09:32:52.448900 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb_3c341744-ef7e-4887-9c70-e8143f784a9f/util/0.log" Dec 05 09:32:52 crc kubenswrapper[4863]: I1205 09:32:52.633826 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb_3c341744-ef7e-4887-9c70-e8143f784a9f/util/0.log" Dec 05 09:32:52 crc kubenswrapper[4863]: I1205 09:32:52.670137 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb_3c341744-ef7e-4887-9c70-e8143f784a9f/pull/0.log" Dec 05 09:32:52 crc kubenswrapper[4863]: I1205 09:32:52.709965 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb_3c341744-ef7e-4887-9c70-e8143f784a9f/pull/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.032087 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb_3c341744-ef7e-4887-9c70-e8143f784a9f/extract/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.055862 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb_3c341744-ef7e-4887-9c70-e8143f784a9f/pull/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.058823 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eafhn4mb_3c341744-ef7e-4887-9c70-e8143f784a9f/util/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.297318 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-gs2j5_fa841a8c-8a88-4134-977a-059db7a51e45/kube-rbac-proxy/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.375959 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2dmqx_cc74b559-6330-495d-b7a3-9582c1a0f935/kube-rbac-proxy/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.398072 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-gs2j5_fa841a8c-8a88-4134-977a-059db7a51e45/manager/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.545283 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2dmqx_cc74b559-6330-495d-b7a3-9582c1a0f935/manager/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.599848 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-q6t65_0b841d1d-9d65-41e2-9db3-687320b8d75d/kube-rbac-proxy/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.715973 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-q6t65_0b841d1d-9d65-41e2-9db3-687320b8d75d/manager/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.812105 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-jvwq8_90a9e868-2b6b-4f2a-ba48-dd03855c47d9/kube-rbac-proxy/0.log" Dec 05 09:32:53 crc kubenswrapper[4863]: I1205 09:32:53.961825 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-jvwq8_90a9e868-2b6b-4f2a-ba48-dd03855c47d9/manager/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.040671 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-fm45v_bc333aa4-954b-4d2d-8bba-8fec9631cecd/kube-rbac-proxy/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.085092 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-fm45v_bc333aa4-954b-4d2d-8bba-8fec9631cecd/manager/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.288673 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-wsxg9_84668b75-595c-4382-bde5-a7561c200d50/kube-rbac-proxy/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.324797 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-wsxg9_84668b75-595c-4382-bde5-a7561c200d50/manager/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.435928 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-hznfl_9cd0a9a8-0623-45f9-84e4-cdc1a6132909/kube-rbac-proxy/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.557216 4863 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-mhtdw_95da9d4b-d38a-4a40-8e9e-282b0b9da2ef/kube-rbac-proxy/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.676176 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-mhtdw_95da9d4b-d38a-4a40-8e9e-282b0b9da2ef/manager/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.847022 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-hznfl_9cd0a9a8-0623-45f9-84e4-cdc1a6132909/manager/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.860051 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-l4gp5_47454469-2c4d-4cbd-befa-eb137b5d4a1e/kube-rbac-proxy/0.log" Dec 05 09:32:54 crc kubenswrapper[4863]: I1205 09:32:54.979001 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-l4gp5_47454469-2c4d-4cbd-befa-eb137b5d4a1e/manager/0.log" Dec 05 09:32:55 crc kubenswrapper[4863]: I1205 09:32:55.139221 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-bq7lw_d7017ca8-d0ed-4245-8786-be169d9dde3a/kube-rbac-proxy/0.log" Dec 05 09:32:55 crc kubenswrapper[4863]: I1205 09:32:55.169188 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-bq7lw_d7017ca8-d0ed-4245-8786-be169d9dde3a/manager/0.log" Dec 05 09:32:55 crc kubenswrapper[4863]: I1205 09:32:55.328325 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-dz59p_ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7/kube-rbac-proxy/0.log" Dec 05 09:32:55 crc kubenswrapper[4863]: I1205 09:32:55.399460 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-dz59p_ebcc8b51-cd3e-4ce3-9b4b-6879f22efef7/manager/0.log" Dec 05 09:32:55 crc kubenswrapper[4863]: I1205 09:32:55.567562 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hwzqx_dd03c712-5c00-447e-a266-4dfe71e3938a/kube-rbac-proxy/0.log" Dec 05 09:32:55 crc kubenswrapper[4863]: I1205 09:32:55.581072 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hwzqx_dd03c712-5c00-447e-a266-4dfe71e3938a/manager/0.log" Dec 05 09:32:55 crc kubenswrapper[4863]: I1205 09:32:55.877460 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-kk8c5_b850a070-2bf4-4163-9e18-0315e1f0b250/kube-rbac-proxy/0.log" Dec 05 09:32:56 crc kubenswrapper[4863]: I1205 09:32:56.275937 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-kk8c5_b850a070-2bf4-4163-9e18-0315e1f0b250/manager/0.log" Dec 05 09:32:56 crc kubenswrapper[4863]: I1205 09:32:56.327891 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-pcpd5_905df848-1d74-4ab5-b9a1-4660b651930b/kube-rbac-proxy/0.log" Dec 05 09:32:56 crc kubenswrapper[4863]: I1205 
09:32:56.397819 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-pcpd5_905df848-1d74-4ab5-b9a1-4660b651930b/manager/0.log" Dec 05 09:32:56 crc kubenswrapper[4863]: I1205 09:32:56.510915 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55c85496f5gt8sr_3830a1fc-64ea-4860-9324-9f71dba749f3/manager/0.log" Dec 05 09:32:56 crc kubenswrapper[4863]: I1205 09:32:56.544193 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55c85496f5gt8sr_3830a1fc-64ea-4860-9324-9f71dba749f3/kube-rbac-proxy/0.log" Dec 05 09:32:56 crc kubenswrapper[4863]: I1205 09:32:56.601974 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:32:56 crc kubenswrapper[4863]: E1205 09:32:56.602387 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:32:57 crc kubenswrapper[4863]: I1205 09:32:57.092904 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-55b6fb9447-wqs6q_9240a28f-333f-4619-b242-1a61f05d6aca/operator/0.log" Dec 05 09:32:57 crc kubenswrapper[4863]: I1205 09:32:57.273119 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-f2hhh_0b62e8bc-0058-4440-a5ce-ab4c5a2f32d0/registry-server/0.log" Dec 05 09:32:57 crc kubenswrapper[4863]: I1205 09:32:57.357523 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jk9p4_429debdd-9c1f-4af0-905e-899a846b5711/kube-rbac-proxy/0.log" Dec 05 09:32:57 crc kubenswrapper[4863]: I1205 09:32:57.693667 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jk9p4_429debdd-9c1f-4af0-905e-899a846b5711/manager/0.log" Dec 05 09:32:57 crc kubenswrapper[4863]: I1205 09:32:57.783414 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-vbbf4_aa38ed5a-a0a4-4a7d-9220-6d093163bb5b/kube-rbac-proxy/0.log" Dec 05 09:32:58 crc kubenswrapper[4863]: I1205 09:32:58.020299 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hbld6_6cba313f-ee15-470e-8dcb-8251b6bfb52a/operator/0.log" Dec 05 09:32:58 crc kubenswrapper[4863]: I1205 09:32:58.031792 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-vbbf4_aa38ed5a-a0a4-4a7d-9220-6d093163bb5b/manager/0.log" Dec 05 09:32:58 crc kubenswrapper[4863]: I1205 09:32:58.749222 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-pr9bb_5e458dfe-ff58-4c00-b65d-69d634abf798/kube-rbac-proxy/0.log" Dec 05 09:32:58 crc kubenswrapper[4863]: I1205 09:32:58.777832 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-pr9bb_5e458dfe-ff58-4c00-b65d-69d634abf798/manager/0.log" Dec 05 09:32:58 crc kubenswrapper[4863]: I1205 09:32:58.808931 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-rgdk8_b683a8d2-9503-4660-8a70-d28bc5b9f75b/kube-rbac-proxy/0.log" Dec 05 09:32:59 crc kubenswrapper[4863]: I1205 09:32:59.010598 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-2wdhl_e17b5c94-e94d-4102-8000-fa7d3c939caf/kube-rbac-proxy/0.log" Dec 05 09:32:59 crc kubenswrapper[4863]: I1205 09:32:59.074394 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-2wdhl_e17b5c94-e94d-4102-8000-fa7d3c939caf/manager/0.log" Dec 05 09:32:59 crc kubenswrapper[4863]: I1205 09:32:59.189872 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-rgdk8_b683a8d2-9503-4660-8a70-d28bc5b9f75b/manager/0.log" Dec 05 09:32:59 crc kubenswrapper[4863]: I1205 09:32:59.206737 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54bdf956c4-thxkr_4b87a8eb-ebb1-450b-bc8c-cd307695f1c4/manager/0.log" Dec 05 09:32:59 crc kubenswrapper[4863]: I1205 09:32:59.253978 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-rqgb7_0c7adcb5-dc00-4705-b753-0c695c580367/manager/0.log" Dec 05 09:32:59 crc kubenswrapper[4863]: I1205 09:32:59.291300 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-rqgb7_0c7adcb5-dc00-4705-b753-0c695c580367/kube-rbac-proxy/0.log" Dec 05 09:33:07 crc kubenswrapper[4863]: I1205 09:33:07.602288 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:33:07 crc kubenswrapper[4863]: E1205 09:33:07.603017 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:33:20 crc kubenswrapper[4863]: I1205 09:33:20.602666 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:33:20 crc kubenswrapper[4863]: E1205 09:33:20.603566 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:33:21 crc kubenswrapper[4863]: I1205 09:33:21.192426 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-tdck6_d4e818aa-ebd1-4fc8-bf0f-baa83e430e50/control-plane-machine-set-operator/0.log" 
Dec 05 09:33:21 crc kubenswrapper[4863]: I1205 09:33:21.211593 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pdkmd_9a775603-788c-43d8-92d9-b5383855ed57/kube-rbac-proxy/0.log" Dec 05 09:33:21 crc kubenswrapper[4863]: I1205 09:33:21.387801 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pdkmd_9a775603-788c-43d8-92d9-b5383855ed57/machine-api-operator/0.log" Dec 05 09:33:33 crc kubenswrapper[4863]: I1205 09:33:33.601654 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:33:33 crc kubenswrapper[4863]: E1205 09:33:33.602691 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:33:34 crc kubenswrapper[4863]: I1205 09:33:34.708788 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-srsmr_b6ca1063-26d1-494c-90ba-3950f5351316/cert-manager-controller/0.log" Dec 05 09:33:34 crc kubenswrapper[4863]: I1205 09:33:34.876384 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-cdqhn_ddcfff38-07b0-4814-9e0f-a7265dd82da3/cert-manager-cainjector/0.log" Dec 05 09:33:35 crc kubenswrapper[4863]: I1205 09:33:35.177700 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-2rknr_5a50e280-47ac-43e1-9d3a-722cd11cafcc/cert-manager-webhook/0.log" Dec 05 09:33:41 crc kubenswrapper[4863]: I1205 09:33:41.619981 4863 scope.go:117] "RemoveContainer" containerID="ae2ce200225289aeefd86d477259cab2f78e42647250d2eb1ef8ec019f5a3c2a" Dec 05 09:33:41 crc kubenswrapper[4863]: I1205 09:33:41.650839 4863 scope.go:117] "RemoveContainer" containerID="bb7edabd0b203020a324d83ed150dcc599f907cbd6d1f1ddc0f78cdc6fae1a18" Dec 05 09:33:41 crc kubenswrapper[4863]: I1205 09:33:41.690441 4863 scope.go:117] "RemoveContainer" containerID="9152899a4ab8cefe097b448d78970426b9395f955197f4b47e48dfc226d4186e" Dec 05 09:33:48 crc kubenswrapper[4863]: I1205 09:33:48.031332 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-97tqb_0d8503fb-1cdd-41ce-94a9-916f603dd90d/nmstate-console-plugin/0.log" Dec 05 09:33:48 crc kubenswrapper[4863]: I1205 09:33:48.287236 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-q9rlj_55fadd2e-07b7-426a-8bb8-697beb4c6209/nmstate-handler/0.log" Dec 05 09:33:48 crc kubenswrapper[4863]: I1205 09:33:48.352702 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-9b5nh_c68996bf-ad3d-4133-8435-f79683625548/kube-rbac-proxy/0.log" Dec 05 09:33:48 crc kubenswrapper[4863]: I1205 09:33:48.459448 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-9b5nh_c68996bf-ad3d-4133-8435-f79683625548/nmstate-metrics/0.log" Dec 05 09:33:48 crc kubenswrapper[4863]: I1205 09:33:48.492074 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-48k4n_fa697669-6ae2-4536-aaf9-77d05ac2251e/nmstate-operator/0.log" Dec 05 09:33:48 crc kubenswrapper[4863]: I1205 09:33:48.603167 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:33:48 crc kubenswrapper[4863]: E1205 09:33:48.603580 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:33:48 crc kubenswrapper[4863]: I1205 09:33:48.673443 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-pfmd9_7816923d-7529-4ac1-830f-0bba65fe40f5/nmstate-webhook/0.log" Dec 05 09:33:59 crc kubenswrapper[4863]: I1205 09:33:59.602455 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:33:59 crc kubenswrapper[4863]: E1205 09:33:59.603278 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.000238 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-ft6gg_30337d56-e873-41ea-96ff-081bf51b8cc0/kube-rbac-proxy/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.229078 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-frr-files/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.484311 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-reloader/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.489054 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-frr-files/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.501531 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-metrics/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.511710 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-ft6gg_30337d56-e873-41ea-96ff-081bf51b8cc0/controller/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.673381 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-reloader/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.871815 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-metrics/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.874369 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-frr-files/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.899633 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-reloader/0.log" Dec 05 09:34:03 crc kubenswrapper[4863]: I1205 09:34:03.914533 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-metrics/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.102537 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-reloader/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.111127 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-frr-files/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.113502 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/cp-metrics/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.117678 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/controller/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.287781 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/frr-metrics/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.290949 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/kube-rbac-proxy/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.319875 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/kube-rbac-proxy-frr/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.532618 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/reloader/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.569787 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-7c6gb_821b5a5d-142c-4637-b602-a95bc2738916/frr-k8s-webhook-server/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.806972 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f688c6497-dlmsr_41176c68-379a-4430-aeb0-3e70be256b92/manager/0.log" Dec 05 09:34:04 crc kubenswrapper[4863]: I1205 09:34:04.917647 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-855bbdbb8d-n4cpf_5157c01b-e8a3-4f95-8f36-a41e0faa358b/webhook-server/0.log" Dec 05 09:34:05 crc kubenswrapper[4863]: I1205 09:34:05.026626 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-v9rgb_f621812d-5a52-432c-afbd-23cd824480af/kube-rbac-proxy/0.log" Dec 05 09:34:05 crc kubenswrapper[4863]: I1205 09:34:05.916398 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-v9rgb_f621812d-5a52-432c-afbd-23cd824480af/speaker/0.log" Dec 05 09:34:07 crc kubenswrapper[4863]: I1205 09:34:07.422424 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-vvf5m_c0a72809-7f21-4896-a9c2-03db9fa6ecd0/frr/0.log" Dec 05 09:34:13 crc kubenswrapper[4863]: I1205 09:34:13.602807 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:34:13 crc kubenswrapper[4863]: E1205 09:34:13.603552 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:34:19 crc kubenswrapper[4863]: I1205 09:34:19.465006 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr_8ff8deee-f74b-43e5-8e15-d2f5685fcb74/util/0.log" Dec 05 09:34:19 crc kubenswrapper[4863]: I1205 09:34:19.712223 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr_8ff8deee-f74b-43e5-8e15-d2f5685fcb74/pull/0.log" Dec 05 09:34:19 crc kubenswrapper[4863]: I1205 09:34:19.723702 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr_8ff8deee-f74b-43e5-8e15-d2f5685fcb74/util/0.log" Dec 05 09:34:19 crc kubenswrapper[4863]: I1205 09:34:19.734684 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr_8ff8deee-f74b-43e5-8e15-d2f5685fcb74/pull/0.log" Dec 05 09:34:19 crc kubenswrapper[4863]: I1205 09:34:19.891113 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr_8ff8deee-f74b-43e5-8e15-d2f5685fcb74/util/0.log" Dec 05 09:34:19 crc kubenswrapper[4863]: I1205 09:34:19.905654 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr_8ff8deee-f74b-43e5-8e15-d2f5685fcb74/pull/0.log" Dec 05 09:34:19 crc kubenswrapper[4863]: I1205 09:34:19.938341 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5k9vr_8ff8deee-f74b-43e5-8e15-d2f5685fcb74/extract/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.132394 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7_a89bb643-8e8e-4e92-9faf-e3a114c3d070/util/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.261234 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7_a89bb643-8e8e-4e92-9faf-e3a114c3d070/pull/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.270619 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7_a89bb643-8e8e-4e92-9faf-e3a114c3d070/util/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.300190 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7_a89bb643-8e8e-4e92-9faf-e3a114c3d070/pull/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.489968 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7_a89bb643-8e8e-4e92-9faf-e3a114c3d070/pull/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.518898 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7_a89bb643-8e8e-4e92-9faf-e3a114c3d070/extract/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.525723 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2tdc7_a89bb643-8e8e-4e92-9faf-e3a114c3d070/util/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.669193 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv_9c2abff9-3399-41b3-ba3b-a65c8ec5d371/util/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.888382 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv_9c2abff9-3399-41b3-ba3b-a65c8ec5d371/pull/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.893577 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv_9c2abff9-3399-41b3-ba3b-a65c8ec5d371/util/0.log" Dec 05 09:34:20 crc kubenswrapper[4863]: I1205 09:34:20.901213 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv_9c2abff9-3399-41b3-ba3b-a65c8ec5d371/pull/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.083287 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv_9c2abff9-3399-41b3-ba3b-a65c8ec5d371/pull/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.090500 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv_9c2abff9-3399-41b3-ba3b-a65c8ec5d371/extract/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.103532 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210w7pnv_9c2abff9-3399-41b3-ba3b-a65c8ec5d371/util/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.270703 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r_035ae227-1541-4b39-b3af-315e31a00f0c/util/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.440423 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r_035ae227-1541-4b39-b3af-315e31a00f0c/pull/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.505399 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r_035ae227-1541-4b39-b3af-315e31a00f0c/pull/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.515246 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r_035ae227-1541-4b39-b3af-315e31a00f0c/util/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.650555 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r_035ae227-1541-4b39-b3af-315e31a00f0c/util/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.668851 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r_035ae227-1541-4b39-b3af-315e31a00f0c/extract/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.688153 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83clw7r_035ae227-1541-4b39-b3af-315e31a00f0c/pull/0.log" Dec 05 09:34:21 crc kubenswrapper[4863]: I1205 09:34:21.849144 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jrqll_38d25020-1ccd-4553-a6e0-959986c494aa/extract-utilities/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.047923 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jrqll_38d25020-1ccd-4553-a6e0-959986c494aa/extract-utilities/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.047959 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jrqll_38d25020-1ccd-4553-a6e0-959986c494aa/extract-content/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.102293 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jrqll_38d25020-1ccd-4553-a6e0-959986c494aa/extract-content/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.263463 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jrqll_38d25020-1ccd-4553-a6e0-959986c494aa/extract-content/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.282718 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jrqll_38d25020-1ccd-4553-a6e0-959986c494aa/extract-utilities/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.499820 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-z87r7_35dcc30f-e85d-4a14-bbe5-67efc3530c0f/extract-utilities/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.757844 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jrqll_38d25020-1ccd-4553-a6e0-959986c494aa/registry-server/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.777287 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-z87r7_35dcc30f-e85d-4a14-bbe5-67efc3530c0f/extract-content/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.822298 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-z87r7_35dcc30f-e85d-4a14-bbe5-67efc3530c0f/extract-utilities/0.log" Dec 05 09:34:22 crc kubenswrapper[4863]: I1205 09:34:22.840003 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-z87r7_35dcc30f-e85d-4a14-bbe5-67efc3530c0f/extract-content/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.000971 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-z87r7_35dcc30f-e85d-4a14-bbe5-67efc3530c0f/extract-utilities/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.082178 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-z87r7_35dcc30f-e85d-4a14-bbe5-67efc3530c0f/extract-content/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.131946 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8b74q_bfe6af80-8ba4-4e35-ad29-87ad44ff6ca7/marketplace-operator/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.273878 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-dq86d_19f2d47e-7dc6-494d-9bfc-b77ba012844c/extract-utilities/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.397574 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-z87r7_35dcc30f-e85d-4a14-bbe5-67efc3530c0f/registry-server/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.492340 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-dq86d_19f2d47e-7dc6-494d-9bfc-b77ba012844c/extract-utilities/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.525836 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-dq86d_19f2d47e-7dc6-494d-9bfc-b77ba012844c/extract-content/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.558827 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-dq86d_19f2d47e-7dc6-494d-9bfc-b77ba012844c/extract-content/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.715292 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-dq86d_19f2d47e-7dc6-494d-9bfc-b77ba012844c/extract-utilities/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.736100 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-dq86d_19f2d47e-7dc6-494d-9bfc-b77ba012844c/extract-content/0.log" Dec 05 09:34:23 crc kubenswrapper[4863]: I1205 09:34:23.859950 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-gbnbn_5102b056-af0a-435a-9d9a-c711ec903c32/extract-utilities/0.log" Dec 05 09:34:24 crc kubenswrapper[4863]: I1205 09:34:24.158830 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-dq86d_19f2d47e-7dc6-494d-9bfc-b77ba012844c/registry-server/0.log" Dec 05 09:34:24 crc kubenswrapper[4863]: I1205 09:34:24.262579 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-gbnbn_5102b056-af0a-435a-9d9a-c711ec903c32/extract-utilities/0.log" Dec 05 09:34:24 crc kubenswrapper[4863]: I1205 09:34:24.329617 4863 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-gbnbn_5102b056-af0a-435a-9d9a-c711ec903c32/extract-content/0.log" Dec 05 09:34:24 crc kubenswrapper[4863]: I1205 09:34:24.337602 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-gbnbn_5102b056-af0a-435a-9d9a-c711ec903c32/extract-content/0.log" Dec 05 09:34:24 crc kubenswrapper[4863]: I1205 09:34:24.473248 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-gbnbn_5102b056-af0a-435a-9d9a-c711ec903c32/extract-content/0.log" Dec 05 09:34:24 crc kubenswrapper[4863]: I1205 09:34:24.591834 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-gbnbn_5102b056-af0a-435a-9d9a-c711ec903c32/extract-utilities/0.log" Dec 05 09:34:25 crc kubenswrapper[4863]: I1205 09:34:25.740877 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-gbnbn_5102b056-af0a-435a-9d9a-c711ec903c32/registry-server/0.log" Dec 05 09:34:26 crc kubenswrapper[4863]: I1205 09:34:26.602425 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:34:26 crc kubenswrapper[4863]: E1205 09:34:26.602980 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:34:38 crc kubenswrapper[4863]: I1205 09:34:38.010837 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-b8q8r_d7194909-83dd-41b1-af13-9a8b6212f1b3/prometheus-operator/0.log" Dec 05 09:34:38 crc kubenswrapper[4863]: I1205 09:34:38.210139 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7c8dc9bbbb-grn4t_f5d9bca6-aeb6-4735-a115-e20ef0d2fe4a/prometheus-operator-admission-webhook/0.log" Dec 05 09:34:38 crc kubenswrapper[4863]: I1205 09:34:38.273509 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7c8dc9bbbb-jvd6j_4a6c106e-c0dd-41c6-b598-fd8a1362b9ea/prometheus-operator-admission-webhook/0.log" Dec 05 09:34:38 crc kubenswrapper[4863]: I1205 09:34:38.413181 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-jn96f_d6f48c43-3adb-4b52-95ab-dbc35a392423/operator/0.log" Dec 05 09:34:38 crc kubenswrapper[4863]: I1205 09:34:38.466415 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-lpvbx_84158b8a-71ed-4dc1-8789-83583fa243d4/perses-operator/0.log" Dec 05 09:34:38 crc kubenswrapper[4863]: I1205 09:34:38.602271 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:34:38 crc kubenswrapper[4863]: E1205 09:34:38.602617 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:34:49 crc kubenswrapper[4863]: I1205 09:34:49.602762 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:34:49 crc kubenswrapper[4863]: E1205 09:34:49.603765 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:35:00 crc kubenswrapper[4863]: E1205 09:35:00.111445 4863 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.106:37860->38.102.83.106:33381: write tcp 38.102.83.106:37860->38.102.83.106:33381: write: broken pipe Dec 05 09:35:00 crc kubenswrapper[4863]: E1205 09:35:00.379927 4863 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.106:36720->38.102.83.106:33381: write tcp 38.102.83.106:36720->38.102.83.106:33381: write: broken pipe Dec 05 09:35:04 crc kubenswrapper[4863]: I1205 09:35:04.604081 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:35:04 crc kubenswrapper[4863]: E1205 09:35:04.605193 4863 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnkj9_openshift-machine-config-operator(b6dd012a-040f-4504-9866-21443f9165d4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" Dec 05 09:35:18 crc kubenswrapper[4863]: I1205 09:35:18.602841 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:35:19 crc kubenswrapper[4863]: I1205 09:35:19.322041 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"cd372486b63f9261dc4f759bc8ff229e96fb2c2e21ed5172ce6b0ddee503741d"} Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.822583 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tsqn6"] Dec 05 09:35:20 crc kubenswrapper[4863]: E1205 09:35:20.824731 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4052ac-e5da-496f-859a-5754c6748098" containerName="registry-server" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.824755 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4052ac-e5da-496f-859a-5754c6748098" containerName="registry-server" Dec 05 09:35:20 crc kubenswrapper[4863]: E1205 09:35:20.824778 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4052ac-e5da-496f-859a-5754c6748098" containerName="extract-content" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.824787 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4052ac-e5da-496f-859a-5754c6748098" 
containerName="extract-content" Dec 05 09:35:20 crc kubenswrapper[4863]: E1205 09:35:20.824874 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4052ac-e5da-496f-859a-5754c6748098" containerName="extract-utilities" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.824885 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4052ac-e5da-496f-859a-5754c6748098" containerName="extract-utilities" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.825527 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4052ac-e5da-496f-859a-5754c6748098" containerName="registry-server" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.829289 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.862913 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tsqn6"] Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.936037 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-catalog-content\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.936103 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-utilities\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:20 crc kubenswrapper[4863]: I1205 09:35:20.936240 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm4hn\" (UniqueName: \"kubernetes.io/projected/6b237a80-bee1-4fef-b885-1de39a09153c-kube-api-access-vm4hn\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.038701 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-utilities\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.039285 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm4hn\" (UniqueName: \"kubernetes.io/projected/6b237a80-bee1-4fef-b885-1de39a09153c-kube-api-access-vm4hn\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.039383 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-catalog-content\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.039669 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-utilities\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.039890 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-catalog-content\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.068407 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vm4hn\" (UniqueName: \"kubernetes.io/projected/6b237a80-bee1-4fef-b885-1de39a09153c-kube-api-access-vm4hn\") pod \"redhat-operators-tsqn6\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.173410 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:21 crc kubenswrapper[4863]: I1205 09:35:21.706675 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tsqn6"] Dec 05 09:35:22 crc kubenswrapper[4863]: I1205 09:35:22.355791 4863 generic.go:334] "Generic (PLEG): container finished" podID="6b237a80-bee1-4fef-b885-1de39a09153c" containerID="9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6" exitCode=0 Dec 05 09:35:22 crc kubenswrapper[4863]: I1205 09:35:22.356047 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsqn6" event={"ID":"6b237a80-bee1-4fef-b885-1de39a09153c","Type":"ContainerDied","Data":"9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6"} Dec 05 09:35:22 crc kubenswrapper[4863]: I1205 09:35:22.356074 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsqn6" event={"ID":"6b237a80-bee1-4fef-b885-1de39a09153c","Type":"ContainerStarted","Data":"23f326562585ee25877d95ecd80c9ae4d9ee2edc964e5010cb7e2a5ce02a3ac8"} Dec 05 09:35:23 crc kubenswrapper[4863]: I1205 09:35:23.366915 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsqn6" event={"ID":"6b237a80-bee1-4fef-b885-1de39a09153c","Type":"ContainerStarted","Data":"b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031"} Dec 05 09:35:27 crc kubenswrapper[4863]: I1205 09:35:27.404492 4863 generic.go:334] "Generic (PLEG): container finished" podID="6b237a80-bee1-4fef-b885-1de39a09153c" containerID="b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031" exitCode=0 Dec 05 09:35:27 crc kubenswrapper[4863]: I1205 09:35:27.404941 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsqn6" event={"ID":"6b237a80-bee1-4fef-b885-1de39a09153c","Type":"ContainerDied","Data":"b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031"} Dec 05 09:35:28 crc kubenswrapper[4863]: I1205 09:35:28.419925 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsqn6" event={"ID":"6b237a80-bee1-4fef-b885-1de39a09153c","Type":"ContainerStarted","Data":"a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9"} Dec 05 09:35:28 crc kubenswrapper[4863]: I1205 09:35:28.439117 4863 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tsqn6" podStartSLOduration=2.712350218 podStartE2EDuration="8.439100234s" podCreationTimestamp="2025-12-05 09:35:20 +0000 UTC" firstStartedPulling="2025-12-05 09:35:22.357968485 +0000 UTC m=+10150.083965525" lastFinishedPulling="2025-12-05 09:35:28.084718491 +0000 UTC m=+10155.810715541" observedRunningTime="2025-12-05 09:35:28.438678414 +0000 UTC m=+10156.164675474" watchObservedRunningTime="2025-12-05 09:35:28.439100234 +0000 UTC m=+10156.165097274" Dec 05 09:35:31 crc kubenswrapper[4863]: I1205 09:35:31.174284 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:31 crc kubenswrapper[4863]: I1205 09:35:31.175935 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:32 crc kubenswrapper[4863]: I1205 09:35:32.225601 4863 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tsqn6" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="registry-server" probeResult="failure" output=< Dec 05 09:35:32 crc kubenswrapper[4863]: timeout: failed to connect service ":50051" within 1s Dec 05 09:35:32 crc kubenswrapper[4863]: > Dec 05 09:35:41 crc kubenswrapper[4863]: I1205 09:35:41.221158 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:41 crc kubenswrapper[4863]: I1205 09:35:41.273708 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:41 crc kubenswrapper[4863]: I1205 09:35:41.469061 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tsqn6"] Dec 05 09:35:42 crc kubenswrapper[4863]: I1205 09:35:42.548108 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tsqn6" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="registry-server" containerID="cri-o://a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9" gracePeriod=2 Dec 05 09:35:42 crc kubenswrapper[4863]: I1205 09:35:42.971377 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.128382 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-utilities\") pod \"6b237a80-bee1-4fef-b885-1de39a09153c\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.128674 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-catalog-content\") pod \"6b237a80-bee1-4fef-b885-1de39a09153c\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.128997 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vm4hn\" (UniqueName: \"kubernetes.io/projected/6b237a80-bee1-4fef-b885-1de39a09153c-kube-api-access-vm4hn\") pod \"6b237a80-bee1-4fef-b885-1de39a09153c\" (UID: \"6b237a80-bee1-4fef-b885-1de39a09153c\") " Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.129407 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-utilities" (OuterVolumeSpecName: "utilities") pod "6b237a80-bee1-4fef-b885-1de39a09153c" (UID: "6b237a80-bee1-4fef-b885-1de39a09153c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.129591 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.136820 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b237a80-bee1-4fef-b885-1de39a09153c-kube-api-access-vm4hn" (OuterVolumeSpecName: "kube-api-access-vm4hn") pod "6b237a80-bee1-4fef-b885-1de39a09153c" (UID: "6b237a80-bee1-4fef-b885-1de39a09153c"). InnerVolumeSpecName "kube-api-access-vm4hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.231136 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vm4hn\" (UniqueName: \"kubernetes.io/projected/6b237a80-bee1-4fef-b885-1de39a09153c-kube-api-access-vm4hn\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.247222 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b237a80-bee1-4fef-b885-1de39a09153c" (UID: "6b237a80-bee1-4fef-b885-1de39a09153c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.333208 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b237a80-bee1-4fef-b885-1de39a09153c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.558209 4863 generic.go:334] "Generic (PLEG): container finished" podID="6b237a80-bee1-4fef-b885-1de39a09153c" containerID="a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9" exitCode=0 Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.558250 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsqn6" event={"ID":"6b237a80-bee1-4fef-b885-1de39a09153c","Type":"ContainerDied","Data":"a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9"} Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.558263 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsqn6" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.558280 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsqn6" event={"ID":"6b237a80-bee1-4fef-b885-1de39a09153c","Type":"ContainerDied","Data":"23f326562585ee25877d95ecd80c9ae4d9ee2edc964e5010cb7e2a5ce02a3ac8"} Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.558297 4863 scope.go:117] "RemoveContainer" containerID="a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.595836 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tsqn6"] Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.602647 4863 scope.go:117] "RemoveContainer" containerID="b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.614968 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tsqn6"] Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.626668 4863 scope.go:117] "RemoveContainer" containerID="9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.669606 4863 scope.go:117] "RemoveContainer" containerID="a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9" Dec 05 09:35:43 crc kubenswrapper[4863]: E1205 09:35:43.670132 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9\": container with ID starting with a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9 not found: ID does not exist" containerID="a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.670185 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9"} err="failed to get container status \"a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9\": rpc error: code = NotFound desc = could not find container \"a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9\": container with ID starting with a23c2609c794c23232a31a630f4b1725edabd3c6cb39641a2d300a27c7ba68c9 not found: ID does not exist" Dec 05 09:35:43 crc 
kubenswrapper[4863]: I1205 09:35:43.670214 4863 scope.go:117] "RemoveContainer" containerID="b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031" Dec 05 09:35:43 crc kubenswrapper[4863]: E1205 09:35:43.670644 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031\": container with ID starting with b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031 not found: ID does not exist" containerID="b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.670685 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031"} err="failed to get container status \"b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031\": rpc error: code = NotFound desc = could not find container \"b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031\": container with ID starting with b82cb03eb5129fd4fb415334351a00bd341412dda3462807c5db75158cd75031 not found: ID does not exist" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.670713 4863 scope.go:117] "RemoveContainer" containerID="9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6" Dec 05 09:35:43 crc kubenswrapper[4863]: E1205 09:35:43.670954 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6\": container with ID starting with 9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6 not found: ID does not exist" containerID="9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6" Dec 05 09:35:43 crc kubenswrapper[4863]: I1205 09:35:43.670983 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6"} err="failed to get container status \"9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6\": rpc error: code = NotFound desc = could not find container \"9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6\": container with ID starting with 9ac8d94db0a17cee00fd032d804bd70270b3d3d739aa87f5377bec51010271d6 not found: ID does not exist" Dec 05 09:35:44 crc kubenswrapper[4863]: I1205 09:35:44.614583 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" path="/var/lib/kubelet/pods/6b237a80-bee1-4fef-b885-1de39a09153c/volumes" Dec 05 09:36:44 crc kubenswrapper[4863]: I1205 09:36:44.183294 4863 generic.go:334] "Generic (PLEG): container finished" podID="43d554cd-5fca-4add-804b-b86db0db2896" containerID="56299d7c00b8a920b410c29d459310ce93e37006e4cfe4d8b1a092a0ad6c303e" exitCode=0 Dec 05 09:36:44 crc kubenswrapper[4863]: I1205 09:36:44.183410 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" event={"ID":"43d554cd-5fca-4add-804b-b86db0db2896","Type":"ContainerDied","Data":"56299d7c00b8a920b410c29d459310ce93e37006e4cfe4d8b1a092a0ad6c303e"} Dec 05 09:36:44 crc kubenswrapper[4863]: I1205 09:36:44.184735 4863 scope.go:117] "RemoveContainer" containerID="56299d7c00b8a920b410c29d459310ce93e37006e4cfe4d8b1a092a0ad6c303e" Dec 05 09:36:44 crc kubenswrapper[4863]: I1205 09:36:44.454413 4863 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-must-gather-b8jrh_must-gather-q4dv8_43d554cd-5fca-4add-804b-b86db0db2896/gather/0.log" Dec 05 09:36:54 crc kubenswrapper[4863]: I1205 09:36:54.441707 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8jrh/must-gather-q4dv8"] Dec 05 09:36:54 crc kubenswrapper[4863]: I1205 09:36:54.442351 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" podUID="43d554cd-5fca-4add-804b-b86db0db2896" containerName="copy" containerID="cri-o://66c36a39e8d024ca07460b431513f9255324f9ea84a0d180c09b5a343cae355e" gracePeriod=2 Dec 05 09:36:54 crc kubenswrapper[4863]: I1205 09:36:54.453247 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8jrh/must-gather-q4dv8"] Dec 05 09:36:55 crc kubenswrapper[4863]: I1205 09:36:55.300671 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-b8jrh_must-gather-q4dv8_43d554cd-5fca-4add-804b-b86db0db2896/copy/0.log" Dec 05 09:36:55 crc kubenswrapper[4863]: I1205 09:36:55.301375 4863 generic.go:334] "Generic (PLEG): container finished" podID="43d554cd-5fca-4add-804b-b86db0db2896" containerID="66c36a39e8d024ca07460b431513f9255324f9ea84a0d180c09b5a343cae355e" exitCode=143 Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.042787 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-b8jrh_must-gather-q4dv8_43d554cd-5fca-4add-804b-b86db0db2896/copy/0.log" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.043596 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.197630 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b5k8\" (UniqueName: \"kubernetes.io/projected/43d554cd-5fca-4add-804b-b86db0db2896-kube-api-access-8b5k8\") pod \"43d554cd-5fca-4add-804b-b86db0db2896\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.197915 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/43d554cd-5fca-4add-804b-b86db0db2896-must-gather-output\") pod \"43d554cd-5fca-4add-804b-b86db0db2896\" (UID: \"43d554cd-5fca-4add-804b-b86db0db2896\") " Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.207748 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43d554cd-5fca-4add-804b-b86db0db2896-kube-api-access-8b5k8" (OuterVolumeSpecName: "kube-api-access-8b5k8") pod "43d554cd-5fca-4add-804b-b86db0db2896" (UID: "43d554cd-5fca-4add-804b-b86db0db2896"). InnerVolumeSpecName "kube-api-access-8b5k8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.314554 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b5k8\" (UniqueName: \"kubernetes.io/projected/43d554cd-5fca-4add-804b-b86db0db2896-kube-api-access-8b5k8\") on node \"crc\" DevicePath \"\"" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.424263 4863 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-b8jrh_must-gather-q4dv8_43d554cd-5fca-4add-804b-b86db0db2896/copy/0.log" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.426182 4863 scope.go:117] "RemoveContainer" containerID="66c36a39e8d024ca07460b431513f9255324f9ea84a0d180c09b5a343cae355e" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.426355 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8jrh/must-gather-q4dv8" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.473920 4863 scope.go:117] "RemoveContainer" containerID="56299d7c00b8a920b410c29d459310ce93e37006e4cfe4d8b1a092a0ad6c303e" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.570023 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43d554cd-5fca-4add-804b-b86db0db2896-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "43d554cd-5fca-4add-804b-b86db0db2896" (UID: "43d554cd-5fca-4add-804b-b86db0db2896"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.616132 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43d554cd-5fca-4add-804b-b86db0db2896" path="/var/lib/kubelet/pods/43d554cd-5fca-4add-804b-b86db0db2896/volumes" Dec 05 09:36:56 crc kubenswrapper[4863]: I1205 09:36:56.628422 4863 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/43d554cd-5fca-4add-804b-b86db0db2896-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:38 crc kubenswrapper[4863]: I1205 09:37:38.464654 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:37:38 crc kubenswrapper[4863]: I1205 09:37:38.465718 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.299550 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mm8hx"] Dec 05 09:37:41 crc kubenswrapper[4863]: E1205 09:37:41.303256 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="extract-content" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303313 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="extract-content" Dec 05 09:37:41 crc kubenswrapper[4863]: E1205 09:37:41.303354 4863 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="43d554cd-5fca-4add-804b-b86db0db2896" containerName="copy" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303365 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="43d554cd-5fca-4add-804b-b86db0db2896" containerName="copy" Dec 05 09:37:41 crc kubenswrapper[4863]: E1205 09:37:41.303394 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="extract-utilities" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303401 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="extract-utilities" Dec 05 09:37:41 crc kubenswrapper[4863]: E1205 09:37:41.303417 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43d554cd-5fca-4add-804b-b86db0db2896" containerName="gather" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303424 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="43d554cd-5fca-4add-804b-b86db0db2896" containerName="gather" Dec 05 09:37:41 crc kubenswrapper[4863]: E1205 09:37:41.303439 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="registry-server" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303445 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="registry-server" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303654 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b237a80-bee1-4fef-b885-1de39a09153c" containerName="registry-server" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303678 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="43d554cd-5fca-4add-804b-b86db0db2896" containerName="copy" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.303699 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="43d554cd-5fca-4add-804b-b86db0db2896" containerName="gather" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.305335 4863 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.319573 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm8hx"] Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.438813 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-utilities\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.439133 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-catalog-content\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.439402 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvlc9\" (UniqueName: \"kubernetes.io/projected/af3041f0-e983-45c1-a7b0-9de2d80289e0-kube-api-access-lvlc9\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.540970 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-utilities\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.541024 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-catalog-content\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.541136 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvlc9\" (UniqueName: \"kubernetes.io/projected/af3041f0-e983-45c1-a7b0-9de2d80289e0-kube-api-access-lvlc9\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.541519 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-utilities\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.541880 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-catalog-content\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.569160 4863 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lvlc9\" (UniqueName: \"kubernetes.io/projected/af3041f0-e983-45c1-a7b0-9de2d80289e0-kube-api-access-lvlc9\") pod \"certified-operators-mm8hx\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:41 crc kubenswrapper[4863]: I1205 09:37:41.678280 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:42 crc kubenswrapper[4863]: I1205 09:37:42.198118 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm8hx"] Dec 05 09:37:42 crc kubenswrapper[4863]: I1205 09:37:42.902557 4863 generic.go:334] "Generic (PLEG): container finished" podID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerID="d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79" exitCode=0 Dec 05 09:37:42 crc kubenswrapper[4863]: I1205 09:37:42.902620 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm8hx" event={"ID":"af3041f0-e983-45c1-a7b0-9de2d80289e0","Type":"ContainerDied","Data":"d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79"} Dec 05 09:37:42 crc kubenswrapper[4863]: I1205 09:37:42.902948 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm8hx" event={"ID":"af3041f0-e983-45c1-a7b0-9de2d80289e0","Type":"ContainerStarted","Data":"5202828c00d1915e37b83c16a0f5e07375172acc8d563cee52f23d246561fe8c"} Dec 05 09:37:42 crc kubenswrapper[4863]: I1205 09:37:42.906627 4863 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:37:43 crc kubenswrapper[4863]: I1205 09:37:43.914534 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm8hx" event={"ID":"af3041f0-e983-45c1-a7b0-9de2d80289e0","Type":"ContainerStarted","Data":"f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921"} Dec 05 09:37:44 crc kubenswrapper[4863]: I1205 09:37:44.924398 4863 generic.go:334] "Generic (PLEG): container finished" podID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerID="f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921" exitCode=0 Dec 05 09:37:44 crc kubenswrapper[4863]: I1205 09:37:44.924656 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm8hx" event={"ID":"af3041f0-e983-45c1-a7b0-9de2d80289e0","Type":"ContainerDied","Data":"f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921"} Dec 05 09:37:45 crc kubenswrapper[4863]: I1205 09:37:45.937180 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm8hx" event={"ID":"af3041f0-e983-45c1-a7b0-9de2d80289e0","Type":"ContainerStarted","Data":"420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44"} Dec 05 09:37:45 crc kubenswrapper[4863]: I1205 09:37:45.958882 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mm8hx" podStartSLOduration=2.519576811 podStartE2EDuration="4.958855521s" podCreationTimestamp="2025-12-05 09:37:41 +0000 UTC" firstStartedPulling="2025-12-05 09:37:42.906255688 +0000 UTC m=+10290.632252728" lastFinishedPulling="2025-12-05 09:37:45.345534388 +0000 UTC m=+10293.071531438" observedRunningTime="2025-12-05 09:37:45.95719543 +0000 UTC m=+10293.683192480" watchObservedRunningTime="2025-12-05 
09:37:45.958855521 +0000 UTC m=+10293.684852561" Dec 05 09:37:51 crc kubenswrapper[4863]: I1205 09:37:51.678681 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:51 crc kubenswrapper[4863]: I1205 09:37:51.679252 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:51 crc kubenswrapper[4863]: I1205 09:37:51.728552 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:52 crc kubenswrapper[4863]: I1205 09:37:52.524993 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:52 crc kubenswrapper[4863]: I1205 09:37:52.574965 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm8hx"] Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.004605 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mm8hx" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="registry-server" containerID="cri-o://420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44" gracePeriod=2 Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.492805 4863 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.625371 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvlc9\" (UniqueName: \"kubernetes.io/projected/af3041f0-e983-45c1-a7b0-9de2d80289e0-kube-api-access-lvlc9\") pod \"af3041f0-e983-45c1-a7b0-9de2d80289e0\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.625457 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-catalog-content\") pod \"af3041f0-e983-45c1-a7b0-9de2d80289e0\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.625555 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-utilities\") pod \"af3041f0-e983-45c1-a7b0-9de2d80289e0\" (UID: \"af3041f0-e983-45c1-a7b0-9de2d80289e0\") " Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.626437 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-utilities" (OuterVolumeSpecName: "utilities") pod "af3041f0-e983-45c1-a7b0-9de2d80289e0" (UID: "af3041f0-e983-45c1-a7b0-9de2d80289e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.632422 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af3041f0-e983-45c1-a7b0-9de2d80289e0-kube-api-access-lvlc9" (OuterVolumeSpecName: "kube-api-access-lvlc9") pod "af3041f0-e983-45c1-a7b0-9de2d80289e0" (UID: "af3041f0-e983-45c1-a7b0-9de2d80289e0"). InnerVolumeSpecName "kube-api-access-lvlc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.704601 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af3041f0-e983-45c1-a7b0-9de2d80289e0" (UID: "af3041f0-e983-45c1-a7b0-9de2d80289e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.728094 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvlc9\" (UniqueName: \"kubernetes.io/projected/af3041f0-e983-45c1-a7b0-9de2d80289e0-kube-api-access-lvlc9\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.729031 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:54 crc kubenswrapper[4863]: I1205 09:37:54.729209 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3041f0-e983-45c1-a7b0-9de2d80289e0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.015301 4863 generic.go:334] "Generic (PLEG): container finished" podID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerID="420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44" exitCode=0 Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.015360 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm8hx" event={"ID":"af3041f0-e983-45c1-a7b0-9de2d80289e0","Type":"ContainerDied","Data":"420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44"} Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.015374 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mm8hx" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.015393 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm8hx" event={"ID":"af3041f0-e983-45c1-a7b0-9de2d80289e0","Type":"ContainerDied","Data":"5202828c00d1915e37b83c16a0f5e07375172acc8d563cee52f23d246561fe8c"} Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.015427 4863 scope.go:117] "RemoveContainer" containerID="420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.042646 4863 scope.go:117] "RemoveContainer" containerID="f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.068704 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm8hx"] Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.076368 4863 scope.go:117] "RemoveContainer" containerID="d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.079689 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mm8hx"] Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.135689 4863 scope.go:117] "RemoveContainer" containerID="420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44" Dec 05 09:37:55 crc kubenswrapper[4863]: E1205 09:37:55.139546 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44\": container with ID starting with 420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44 not found: ID does not exist" containerID="420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.139587 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44"} err="failed to get container status \"420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44\": rpc error: code = NotFound desc = could not find container \"420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44\": container with ID starting with 420a5a96055596e16fb22b57bb48a6d11931060060da1460bf24363b1f706b44 not found: ID does not exist" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.139622 4863 scope.go:117] "RemoveContainer" containerID="f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921" Dec 05 09:37:55 crc kubenswrapper[4863]: E1205 09:37:55.140120 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921\": container with ID starting with f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921 not found: ID does not exist" containerID="f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.140142 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921"} err="failed to get container status \"f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921\": rpc error: code = NotFound desc = could not find 
container \"f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921\": container with ID starting with f325ee1e51ff16430ebad2fe23771432b5ccd7380fabe20c71e15198a6faf921 not found: ID does not exist" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.140252 4863 scope.go:117] "RemoveContainer" containerID="d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79" Dec 05 09:37:55 crc kubenswrapper[4863]: E1205 09:37:55.140596 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79\": container with ID starting with d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79 not found: ID does not exist" containerID="d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79" Dec 05 09:37:55 crc kubenswrapper[4863]: I1205 09:37:55.140646 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79"} err="failed to get container status \"d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79\": rpc error: code = NotFound desc = could not find container \"d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79\": container with ID starting with d601b87d7915c70602c5438e3377a41dad6ccf2c1925481004eeba952a289a79 not found: ID does not exist" Dec 05 09:37:56 crc kubenswrapper[4863]: I1205 09:37:56.618051 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" path="/var/lib/kubelet/pods/af3041f0-e983-45c1-a7b0-9de2d80289e0/volumes" Dec 05 09:38:08 crc kubenswrapper[4863]: I1205 09:38:08.463714 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:38:08 crc kubenswrapper[4863]: I1205 09:38:08.464247 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.118755 4863 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jwtz7"] Dec 05 09:38:11 crc kubenswrapper[4863]: E1205 09:38:11.119699 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="extract-utilities" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.119717 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="extract-utilities" Dec 05 09:38:11 crc kubenswrapper[4863]: E1205 09:38:11.119733 4863 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="registry-server" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.119742 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="registry-server" Dec 05 09:38:11 crc kubenswrapper[4863]: E1205 09:38:11.119786 4863 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="extract-content" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.119797 4863 state_mem.go:107] "Deleted CPUSet assignment" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="extract-content" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.120047 4863 memory_manager.go:354] "RemoveStaleState removing state" podUID="af3041f0-e983-45c1-a7b0-9de2d80289e0" containerName="registry-server" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.122002 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.130834 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jwtz7"] Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.188742 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-utilities\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.188817 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hrc2\" (UniqueName: \"kubernetes.io/projected/e31a6d39-2fbe-433c-a48b-82ace90bef14-kube-api-access-2hrc2\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.188865 4863 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-catalog-content\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.290395 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-catalog-content\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.290611 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-utilities\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.290667 4863 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hrc2\" (UniqueName: \"kubernetes.io/projected/e31a6d39-2fbe-433c-a48b-82ace90bef14-kube-api-access-2hrc2\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.290853 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-catalog-content\") pod 
\"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.291119 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-utilities\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.310551 4863 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hrc2\" (UniqueName: \"kubernetes.io/projected/e31a6d39-2fbe-433c-a48b-82ace90bef14-kube-api-access-2hrc2\") pod \"community-operators-jwtz7\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.458959 4863 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:11 crc kubenswrapper[4863]: I1205 09:38:11.975680 4863 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jwtz7"] Dec 05 09:38:12 crc kubenswrapper[4863]: W1205 09:38:12.184616 4863 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode31a6d39_2fbe_433c_a48b_82ace90bef14.slice/crio-82d9fe2bafd0c1aa5025eb0f27d436365d47d8529c01cba49600a3c526251537 WatchSource:0}: Error finding container 82d9fe2bafd0c1aa5025eb0f27d436365d47d8529c01cba49600a3c526251537: Status 404 returned error can't find the container with id 82d9fe2bafd0c1aa5025eb0f27d436365d47d8529c01cba49600a3c526251537 Dec 05 09:38:13 crc kubenswrapper[4863]: I1205 09:38:13.189676 4863 generic.go:334] "Generic (PLEG): container finished" podID="e31a6d39-2fbe-433c-a48b-82ace90bef14" containerID="3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b" exitCode=0 Dec 05 09:38:13 crc kubenswrapper[4863]: I1205 09:38:13.189755 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwtz7" event={"ID":"e31a6d39-2fbe-433c-a48b-82ace90bef14","Type":"ContainerDied","Data":"3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b"} Dec 05 09:38:13 crc kubenswrapper[4863]: I1205 09:38:13.190003 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwtz7" event={"ID":"e31a6d39-2fbe-433c-a48b-82ace90bef14","Type":"ContainerStarted","Data":"82d9fe2bafd0c1aa5025eb0f27d436365d47d8529c01cba49600a3c526251537"} Dec 05 09:38:14 crc kubenswrapper[4863]: I1205 09:38:14.200217 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwtz7" event={"ID":"e31a6d39-2fbe-433c-a48b-82ace90bef14","Type":"ContainerStarted","Data":"d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc"} Dec 05 09:38:15 crc kubenswrapper[4863]: I1205 09:38:15.225436 4863 generic.go:334] "Generic (PLEG): container finished" podID="e31a6d39-2fbe-433c-a48b-82ace90bef14" containerID="d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc" exitCode=0 Dec 05 09:38:15 crc kubenswrapper[4863]: I1205 09:38:15.225517 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwtz7" 
event={"ID":"e31a6d39-2fbe-433c-a48b-82ace90bef14","Type":"ContainerDied","Data":"d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc"} Dec 05 09:38:16 crc kubenswrapper[4863]: I1205 09:38:16.237708 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwtz7" event={"ID":"e31a6d39-2fbe-433c-a48b-82ace90bef14","Type":"ContainerStarted","Data":"bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15"} Dec 05 09:38:16 crc kubenswrapper[4863]: I1205 09:38:16.266337 4863 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jwtz7" podStartSLOduration=2.851278971 podStartE2EDuration="5.266318761s" podCreationTimestamp="2025-12-05 09:38:11 +0000 UTC" firstStartedPulling="2025-12-05 09:38:13.192097464 +0000 UTC m=+10320.918094504" lastFinishedPulling="2025-12-05 09:38:15.607137254 +0000 UTC m=+10323.333134294" observedRunningTime="2025-12-05 09:38:16.258972389 +0000 UTC m=+10323.984969429" watchObservedRunningTime="2025-12-05 09:38:16.266318761 +0000 UTC m=+10323.992315801" Dec 05 09:38:21 crc kubenswrapper[4863]: I1205 09:38:21.459648 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:21 crc kubenswrapper[4863]: I1205 09:38:21.460174 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:21 crc kubenswrapper[4863]: I1205 09:38:21.526245 4863 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:22 crc kubenswrapper[4863]: I1205 09:38:22.337520 4863 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:22 crc kubenswrapper[4863]: I1205 09:38:22.386153 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jwtz7"] Dec 05 09:38:24 crc kubenswrapper[4863]: I1205 09:38:24.307908 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jwtz7" podUID="e31a6d39-2fbe-433c-a48b-82ace90bef14" containerName="registry-server" containerID="cri-o://bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15" gracePeriod=2 Dec 05 09:38:24 crc kubenswrapper[4863]: I1205 09:38:24.912714 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:24 crc kubenswrapper[4863]: I1205 09:38:24.997016 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-catalog-content\") pod \"e31a6d39-2fbe-433c-a48b-82ace90bef14\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " Dec 05 09:38:24 crc kubenswrapper[4863]: I1205 09:38:24.997090 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-utilities\") pod \"e31a6d39-2fbe-433c-a48b-82ace90bef14\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " Dec 05 09:38:24 crc kubenswrapper[4863]: I1205 09:38:24.997164 4863 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hrc2\" (UniqueName: \"kubernetes.io/projected/e31a6d39-2fbe-433c-a48b-82ace90bef14-kube-api-access-2hrc2\") pod \"e31a6d39-2fbe-433c-a48b-82ace90bef14\" (UID: \"e31a6d39-2fbe-433c-a48b-82ace90bef14\") " Dec 05 09:38:24 crc kubenswrapper[4863]: I1205 09:38:24.997852 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-utilities" (OuterVolumeSpecName: "utilities") pod "e31a6d39-2fbe-433c-a48b-82ace90bef14" (UID: "e31a6d39-2fbe-433c-a48b-82ace90bef14"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.050691 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e31a6d39-2fbe-433c-a48b-82ace90bef14" (UID: "e31a6d39-2fbe-433c-a48b-82ace90bef14"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.099794 4863 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.099838 4863 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31a6d39-2fbe-433c-a48b-82ace90bef14-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.319779 4863 generic.go:334] "Generic (PLEG): container finished" podID="e31a6d39-2fbe-433c-a48b-82ace90bef14" containerID="bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15" exitCode=0 Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.319818 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwtz7" event={"ID":"e31a6d39-2fbe-433c-a48b-82ace90bef14","Type":"ContainerDied","Data":"bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15"} Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.319838 4863 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jwtz7" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.319855 4863 scope.go:117] "RemoveContainer" containerID="bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.319844 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwtz7" event={"ID":"e31a6d39-2fbe-433c-a48b-82ace90bef14","Type":"ContainerDied","Data":"82d9fe2bafd0c1aa5025eb0f27d436365d47d8529c01cba49600a3c526251537"} Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.337992 4863 scope.go:117] "RemoveContainer" containerID="d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.481537 4863 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e31a6d39-2fbe-433c-a48b-82ace90bef14-kube-api-access-2hrc2" (OuterVolumeSpecName: "kube-api-access-2hrc2") pod "e31a6d39-2fbe-433c-a48b-82ace90bef14" (UID: "e31a6d39-2fbe-433c-a48b-82ace90bef14"). InnerVolumeSpecName "kube-api-access-2hrc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.498669 4863 scope.go:117] "RemoveContainer" containerID="3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.508405 4863 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hrc2\" (UniqueName: \"kubernetes.io/projected/e31a6d39-2fbe-433c-a48b-82ace90bef14-kube-api-access-2hrc2\") on node \"crc\" DevicePath \"\"" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.589567 4863 scope.go:117] "RemoveContainer" containerID="bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15" Dec 05 09:38:25 crc kubenswrapper[4863]: E1205 09:38:25.590006 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15\": container with ID starting with bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15 not found: ID does not exist" containerID="bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.590107 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15"} err="failed to get container status \"bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15\": rpc error: code = NotFound desc = could not find container \"bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15\": container with ID starting with bf865591b4dcc00bdf63bdb863f9c4bb684da2b11945ce8cbf050bfb72441b15 not found: ID does not exist" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.590134 4863 scope.go:117] "RemoveContainer" containerID="d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc" Dec 05 09:38:25 crc kubenswrapper[4863]: E1205 09:38:25.590458 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc\": container with ID starting with d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc not found: ID does not exist" 
containerID="d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.590527 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc"} err="failed to get container status \"d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc\": rpc error: code = NotFound desc = could not find container \"d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc\": container with ID starting with d8fd0e7c06f3f3ee15e17e177458f68c85cd9db1d7a68f35874b0539ed5911cc not found: ID does not exist" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.590553 4863 scope.go:117] "RemoveContainer" containerID="3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b" Dec 05 09:38:25 crc kubenswrapper[4863]: E1205 09:38:25.590913 4863 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b\": container with ID starting with 3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b not found: ID does not exist" containerID="3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.590956 4863 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b"} err="failed to get container status \"3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b\": rpc error: code = NotFound desc = could not find container \"3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b\": container with ID starting with 3bfd9c030af80160309148dc6be0a22351ad9483d05bdd77aff8a86f079a5c0b not found: ID does not exist" Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.759966 4863 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jwtz7"] Dec 05 09:38:25 crc kubenswrapper[4863]: I1205 09:38:25.770956 4863 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jwtz7"] Dec 05 09:38:26 crc kubenswrapper[4863]: I1205 09:38:26.614585 4863 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e31a6d39-2fbe-433c-a48b-82ace90bef14" path="/var/lib/kubelet/pods/e31a6d39-2fbe-433c-a48b-82ace90bef14/volumes" Dec 05 09:38:38 crc kubenswrapper[4863]: I1205 09:38:38.464137 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:38:38 crc kubenswrapper[4863]: I1205 09:38:38.465034 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:38:38 crc kubenswrapper[4863]: I1205 09:38:38.465103 4863 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" Dec 05 09:38:38 crc kubenswrapper[4863]: I1205 09:38:38.466316 4863 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cd372486b63f9261dc4f759bc8ff229e96fb2c2e21ed5172ce6b0ddee503741d"} pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:38:38 crc kubenswrapper[4863]: I1205 09:38:38.466445 4863 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" containerID="cri-o://cd372486b63f9261dc4f759bc8ff229e96fb2c2e21ed5172ce6b0ddee503741d" gracePeriod=600 Dec 05 09:38:39 crc kubenswrapper[4863]: I1205 09:38:39.473463 4863 generic.go:334] "Generic (PLEG): container finished" podID="b6dd012a-040f-4504-9866-21443f9165d4" containerID="cd372486b63f9261dc4f759bc8ff229e96fb2c2e21ed5172ce6b0ddee503741d" exitCode=0 Dec 05 09:38:39 crc kubenswrapper[4863]: I1205 09:38:39.473512 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerDied","Data":"cd372486b63f9261dc4f759bc8ff229e96fb2c2e21ed5172ce6b0ddee503741d"} Dec 05 09:38:39 crc kubenswrapper[4863]: I1205 09:38:39.473976 4863 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" event={"ID":"b6dd012a-040f-4504-9866-21443f9165d4","Type":"ContainerStarted","Data":"3bf82bf9a6108298b3681025b3ab8e70fe536779d00dade107b993cb59006d3c"} Dec 05 09:38:39 crc kubenswrapper[4863]: I1205 09:38:39.473993 4863 scope.go:117] "RemoveContainer" containerID="c89a38603c9ca7e31e8fbec969ce9a1a504cf9fed0479dd8220895d95a32c503" Dec 05 09:40:38 crc kubenswrapper[4863]: I1205 09:40:38.464202 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:40:38 crc kubenswrapper[4863]: I1205 09:40:38.464783 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:41:08 crc kubenswrapper[4863]: I1205 09:41:08.464725 4863 patch_prober.go:28] interesting pod/machine-config-daemon-mnkj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:41:08 crc kubenswrapper[4863]: I1205 09:41:08.465253 4863 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnkj9" podUID="b6dd012a-040f-4504-9866-21443f9165d4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114524275024453 0ustar coreroot‹íÁ  ÷Om7 